signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def unget_bytes(self, string):
    """Adds bytes to the internal buffer to be read.

    This method is for reporting bytes from an in_stream read
    not initiated by this Input object.
    """
    # Split the byte string into single-byte slices so each buffered
    # element is itself a bytes object of length one.
    single_bytes = (string[idx:idx + 1] for idx in range(len(string)))
    self.unprocessed_bytes.extend(single_bytes)
|
def create_db(self, models):
    """Creates the in-memory SQLite database from the model configuration."""
    # Build a table definition for every configured model, keyed by model name.
    self.tables = {
        model_name: self.create_model_table(model)
        for model_name, model in iteritems(models)
    }
    logger.debug("Creating %d database table(s)...", len(self.tables))
    # Materialise the schema on the in-memory engine.
    try:
        self.Base.metadata.create_all(self.engine)
    except Exception as exc:
        raise StatikError(message="Failed to create in-memory data model.", orig_exc=exc)
    # Populate the freshly created tables from the model data.
    self.load_all_model_data(models)
|
def stdout_encode(u, default='utf-8'):
    """Encodes a given string with the proper standard out encoding.

    If sys.stdout.encoding isn't specified, this defaults to @default.
    @default: default encoding
    -> #str with standard out encoding
    """
    # from http://stackoverflow.com/questions/3627793/best-output-type-and-
    # encoding-practices-for-repr-functions
    target = sys.stdout.encoding
    if not target:
        target = default
    # Round-trip through the target codec, replacing unencodable characters.
    encoded = u.encode(target, "replace")
    return encoded.decode(target, "replace")
|
def solveConsKinkyPref(solution_next, IncomeDstn, PrefShkDstn, LivPrb, DiscFac, CRRA, Rboro, Rsave, PermGroFac, BoroCnstArt, aXtraGrid, vFuncBool, CubicBool):
    '''
    Solves a single period of a consumption-saving model with preference shocks
    to marginal utility and a different interest rate on saving vs borrowing.
    Problem is solved using the method of endogenous gridpoints.

    Parameters
    ----------
    solution_next : ConsumerSolution
        The solution to the succeeding one period problem.
    IncomeDstn : [np.array]
        A list containing three arrays of floats, representing a discrete
        approximation to the income process between the period being solved
        and the one immediately following (in solution_next). Order: event
        probabilities, permanent shocks, transitory shocks.
    PrefShkDstn : [np.array]
        Discrete distribution of the multiplicative utility shifter. Order:
        probabilities, preference shocks.
    LivPrb : float
        Survival probability; likelihood of being alive at the beginning of
        the succeeding period.
    DiscFac : float
        Intertemporal discount factor for future utility.
    CRRA : float
        Coefficient of relative risk aversion.
    Rboro : float
        Interest factor on assets between this period and the succeeding
        period when assets are negative.
    Rsave : float
        Interest factor on assets between this period and the succeeding
        period when assets are positive.
    PermGroFac : float
        Expected permanent income growth factor at the end of this period.
    BoroCnstArt : float or None
        Borrowing constraint for the minimum allowable assets to end the
        period with.  If it is less than the natural borrowing constraint,
        then it is irrelevant; BoroCnstArt=None indicates no artificial
        borrowing constraint.
    aXtraGrid : np.array
        Array of "extra" end-of-period asset values -- assets above the
        absolute minimum acceptable level.
    vFuncBool : boolean
        An indicator for whether the value function should be computed and
        included in the reported solution.
    CubicBool : boolean
        An indicator for whether the solver should use cubic or linear
        interpolation.

    Returns
    -------
    solution : ConsumerSolution
        The solution to the single period consumption-saving problem.  Includes
        a consumption function cFunc (using linear splines), a marginal value
        function vPfunc, a minimum acceptable level of normalized market
        resources mNrmMin, normalized human wealth hNrm, and bounding MPCs
        MPCmin and MPCmax.  It might also have a value function vFunc.  The
        consumption function is defined over normalized market resources and
        the preference shock, c = cFunc(m, PrefShk), but the (marginal) value
        function is defined unconditionally on the shock, just before it is
        revealed.
    '''
    # All of the actual work is delegated to the one-period solver object.
    period_solver = ConsKinkyPrefSolver(solution_next, IncomeDstn, PrefShkDstn,
                                        LivPrb, DiscFac, CRRA, Rboro, Rsave,
                                        PermGroFac, BoroCnstArt, aXtraGrid,
                                        vFuncBool, CubicBool)
    period_solver.prepareToSolve()
    return period_solver.solve()
|
def _init_map ( self ) :
"""stub"""
|
self . my_osid_object_form . _my_map [ 'learningObjectiveId' ] = str ( self . _learning_objective_id_metadata [ 'default_id_values' ] [ 0 ] )
self . my_osid_object_form . _my_map [ 'minimumProficiency' ] = str ( self . _minimum_proficiency_metadata [ 'default_id_values' ] [ 0 ] )
|
def set_brightness(self, brightness, duration=0, rapid=False):
    """Set the light's brightness, keeping the other color components unchanged.

    :param brightness: brightness value to set
    :param duration: transition duration in ms
    :param rapid: if True, send fire-and-forget (no acknowledgement)
        instead of waiting for an ack
    """
    # Current color tuple is (hue, saturation, brightness, kelvin);
    # replace only the brightness component (index 2).
    color = self.get_color()
    color2 = (color[0], color[1], brightness, color[3])
    payload = {"color": color2, "duration": duration}
    # FIX: the original wrapped these calls in
    # `except WorkflowException as e: raise`, a no-op handler that only
    # re-raised unchanged (and never used `e`); it has been removed.
    if rapid:
        self.fire_and_forget(LightSetColor, payload, num_repeats=1)
    else:
        self.req_with_ack(LightSetColor, payload)
|
def file_open(self, fn):
    """Yields the opening text of a file section in multipart HTTP.

    Parameters
    ----------
    fn : str
        Filename for the file being opened and added to the HTTP body
    """
    # Section delimiter: "--" + boundary + CRLF.
    for piece in (b'--', self.boundary.encode(), CRLF):
        yield piece
    # Per-file headers: content disposition merged with content type.
    headers = content_disposition(fn)
    headers.update(content_type(fn))
    for chunk in self._write_headers(headers):
        yield chunk
|
def transliterate(string):
    """Replace non-ASCII characters with an ASCII approximation. If no
    approximation exists, the non-ASCII character is ignored. The string must
    be ``unicode``.

    Examples::

        >>> transliterate(u'älämölö')
        u'alamolo'
        >>> transliterate(u'Ærøskøbing')
        u'rskbing'
    """
    # NFKD decomposition splits accented characters into base character +
    # combining mark; the ASCII encode with 'ignore' then drops the marks
    # (and any character with no ASCII base).
    try:
        text = unicode(string)  # Python 2: coerce to unicode first
    except NameError:
        text = string  # Python 3: str is already unicode
    decomposed = unicodedata.normalize('NFKD', text)
    return decomposed.encode('ascii', 'ignore').decode('ascii')
|
def parse_time_indices(s):
    """Parse a string as time indices.

    Args:
      s: A valid slicing string for time indices. E.g., '-1', '[:]', ':', '2:10'

    Returns:
      A slice object.

    Raises:
      ValueError: If `s` does not represent valid time indices.
    """
    # Normalise bare slice text ("2:10") to bracketed form ("[2:10]") so the
    # generic slice parser can handle it.
    bracketed = s if s.startswith('[') else '[' + s + ']'
    parsed = command_parser._parse_slices(bracketed)
    if len(parsed) != 1:
        raise ValueError('Invalid number of slicing objects in time indices (%d)' % len(parsed))
    return parsed[0]
|
def set_yticklabels_position(self, row, column, position):
    """Specify the position of the axis tick labels.

    This is generally only useful for multiplots containing only one
    column.  This can be used to e.g. alternatively draw the tick
    labels on the left or the right of the subplot.

    :param row, column: specify the subplot.
    :param position: 'left' or 'right' to specify the position of the
        tick labels.
    """
    # Look up the addressed subplot and forward the request to it.
    target = self.get_subplot_at(row, column)
    target.set_yticklabels_position(position)
|
def associate(self, floating_ip_id, port_id):
    """Associates the floating IP to the port.

    ``port_id`` represents a VNIC of an instance.
    ``port_id`` argument is different from a normal neutron port ID.
    A value passed as ``port_id`` must be one of target_id returned by
    ``list_targets``, ``get_target_by_instance`` or
    ``list_targets_by_instance`` method.
    """
    # NOTE: In Neutron Horizon floating IP support, port_id is
    # "<port_id>_<ip_address>" format to identify multiple ports.
    real_port_id, fixed_ip = port_id.split('_', 1)
    body = {'floatingip': {'port_id': real_port_id, 'fixed_ip_address': fixed_ip}}
    self.client.update_floatingip(floating_ip_id, body)
|
def refresh(self):
    """Reload the current page with the same request as originally done.

    Any change (`select_form`, or any value filled-in in the form) made to
    the current page before refresh is discarded.

    :raise ValueError: Raised if no refreshable page is loaded, e.g., when
        using the shallow ``Browser`` wrapper functions.
    :return: Response of the request.
    """
    previous_request = self.__state.request
    if previous_request is None:
        raise ValueError('The current page is not refreshable. Either no '
                         'page is opened or low-level browser methods '
                         'were used to do so')
    # Replay the original request and rebuild the browser state from the
    # fresh response.
    response = self.session.send(previous_request)
    Browser.add_soup(response, self.soup_config)
    self.__state = _BrowserState(page=response.soup, url=response.url,
                                 request=response.request)
    return response
|
def _snip_whitespace ( self , text ) :
"""* snip the whitespace at the start and end of the text *
* * Key Arguments : * *
- ` ` text ` ` - - the text to snip
* * Return : * *
- ` ` prefix ` ` , ` ` text ` ` , ` ` suffix ` ` - - the starting whitespace , text and endding whitespace"""
|
self . log . debug ( 'starting the ``_snip_whitespace`` method' )
m = self . reWS . match ( text )
prefix = m . group ( 1 )
text = m . group ( 2 )
suffix = m . group ( 3 )
self . log . debug ( 'completed the ``_snip_whitespace`` method' )
return prefix , text , suffix
|
def _ecc_static_length_signature(key, algorithm, digest):
    """Calculates an elliptic curve signature with a static length using pre-calculated hash.

    :param key: Elliptic curve private key
    :type key: cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey
    :param algorithm: Master algorithm to use
    :type algorithm: aws_encryption_sdk.identifiers.Algorithm
    :param bytes digest: Pre-calculated hash digest
    :returns: Signature with required length
    :rtype: bytes
    """
    prehashed = ec.ECDSA(Prehashed(algorithm.signing_hash_type()))
    desired_len = algorithm.signature_len
    signature = b""
    # Keep signing until the DER-encoded signature has exactly the
    # required length.
    while len(signature) != desired_len:
        _LOGGER.debug("Signature length %d is not desired length %d. Recalculating.", len(signature), desired_len)
        signature = key.sign(digest, prehashed)
        if len(signature) != desired_len:
            # Most of the time, a signature of the wrong length can be fixed
            # by negating s in the signature relative to the group order.
            _LOGGER.debug("Signature length %d is not desired length %d. Negating s.", len(signature), desired_len)
            r, s = decode_dss_signature(signature)
            s = _ECC_CURVE_PARAMETERS[algorithm.signing_algorithm_info.name].order - s
            signature = encode_dss_signature(r, s)
    return signature
|
def distance(f1, f2):
    """Distance between 2 features. The integer result is always positive or zero.
    If the features overlap or touch, it is zero.

    >>> from intersecter import Feature, distance
    >>> distance(Feature(1, 2), Feature(12, 13))
    10
    >>> distance(Feature(1, 2), Feature(2, 3))
    0
    >>> distance(Feature(1, 100), Feature(20, 30))
    0

    (FIX: the last two doctests previously showed no expected output, which
    a doctest runner would read as ``None`` — the function returns ``0``
    for touching/contained features.)
    """
    # f1 lies entirely before f2: gap between f1's end and f2's start.
    if f1.end < f2.start:
        return f2.start - f1.end
    # f2 lies entirely before f1: gap the other way around.
    if f2.end < f1.start:
        return f1.start - f2.end
    # Overlapping or touching features.
    return 0
|
def request(self, method, url, *args, **kwargs):
    """Pass-thru method to make this class behave a little like HTTPConnection.

    All positional AND keyword arguments beyond ``method``/``url`` are
    forwarded unchanged to ``self.http.request``.  (FIX: keyword arguments
    were previously dropped, so callers could not pass e.g. ``body=`` or
    ``headers=`` the way HTTPConnection.request allows.)
    """
    return self.http.request(method, url, *args, **kwargs)
|
def enable_asynchronous(self):
    """Check if the socket module has been monkey patched by gevent.

    :raises Exception: if gevent is missing or its monkey patching of the
        socket module is not active.
    :return: True when gevent socket patching is in effect.
    """
    def is_monkey_patched():
        # Probe for gevent; without it there can be no patching.
        try:
            import socket as std_socket
            from gevent import monkey
            from gevent import socket as gevent_socket
        except ImportError:
            return False
        # Modern gevent records patched modules in monkey.saved.
        if hasattr(monkey, "saved"):
            return "socket" in monkey.saved
        # Fallback: the stdlib socket class is replaced by gevent's when
        # patched.  (FIX: the original compared `gevent.socket.socket` with
        # `socket.socket`, but `gevent` was never imported as a bare name —
        # a NameError — and the local `socket` *was* gevent's socket, making
        # the comparison meaningless.)
        return std_socket.socket == gevent_socket.socket
    if not is_monkey_patched():
        raise Exception("To activate asynchronicity, please monkey patch"
                        " the socket module with gevent")
    return True
|
def _create_single_taskpaper_task_list(self, content, setName):
    """*create single, sorted taskpaper task list from content pulled in from all of the workspace taskpaper docs*

    **Key Arguments:**
        - ``content`` -- the content to add to the taskpaper task index
        - ``setName`` -- the name of the sync tag set

    **Return:**
        - ``taskpaperDocPath`` -- path to the task index taskpaper doc
    """
    self.log.info('starting the ``_create_single_taskpaper_task_list`` method')
    taskpaperDocPath = None
    if len(content):
        # Editorial-synced workspaces get an "e-" filename prefix.
        if self.editorialRootPath:
            taskpaperDocPath = self.syncFolder + "/e-" + self.workspaceName + "-" + setName + "-tasks.taskpaper"
        else:
            taskpaperDocPath = self.syncFolder + "/" + self.workspaceName + "-" + setName + "-tasks.taskpaper"
        try:
            self.log.debug("attempting to open the file %s" % (taskpaperDocPath,))
            writeFile = codecs.open(taskpaperDocPath, encoding='utf-8', mode='w')
        except IOError as e:
            # FIX: `except IOError , e :` is Python 2-only syntax and a
            # SyntaxError on Python 3; use the `as` form instead.
            message = 'could not open the file %s' % (taskpaperDocPath,)
            self.log.critical(message)
            raise IOError(message)
        writeFile.write(content)
        writeFile.close()
        # OPEN TASKPAPER FILE
        if self.editorialRootPath:
            doc = document(self.syncFolder + "/e-" + self.workspaceName + "-" + setName + "-tasks.taskpaper")
        else:
            doc = document(self.syncFolder + "/" + self.workspaceName + "-" + setName + "-tasks.taskpaper")
        # Sort the freshly written doc by the configured workflow tags.
        doc.sort_projects(workflowTags=self.workflowTags)
        doc.sort_tasks(workflowTags=self.workflowTags)
        doc.save()
    self.log.info('completed the ``_create_single_taskpaper_task_list`` method')
    return taskpaperDocPath
|
def coord(self, func: CoordFunc, *args, **kwargs) -> 'Image':
    "Apply `func` to this image's flow field in place and return the image."
    # NOTE(review): only `self.flow` (plus *args/**kwargs) is passed to
    # `func`; the image size is NOT forwarded, despite the original doc
    # claiming `func(image.flow, image.size)` -- confirm intended contract.
    updated_flow = func(self.flow, *args, **kwargs)
    self.flow = updated_flow
    return self
|
def resolve_aliases(self, target, scope=None):
    """Resolve aliases in the direct dependencies of the target.

    :param target: The direct dependencies of this target are included.
    :param scope: When specified, only deps with this scope are included.  This is more
        than a filter, because it prunes the subgraphs represented by aliases with
        un-matched scopes.
    :returns: An iterator of (resolved_dependency, resolved_from) tuples.
        `resolved_from` is the top level target alias that depends on `resolved_dependency`,
        and `None` if `resolved_dependency` is not a dependency of a target alias.
    """
    for dep in target.dependencies:
        # Only `DEFAULT` scoped deps are eligible for the unused dep check.
        if scope is not None and dep.scope != scope:
            continue
        # Exact type check (deliberately not isinstance): aliases are
        # expanded recursively, everything else is yielded directly.
        if type(dep) in (AliasTarget, Target):
            for resolved, _ in self.resolve_aliases(dep, scope=scope):
                yield resolved, dep
        else:
            yield dep, None
|
def linkify_one_command_with_commands(self, commands, prop):
    """Link a command to a property (check_command for example)

    :param commands: commands object
    :type commands: alignak.objects.command.Commands
    :param prop: property name
    :type prop: str
    :return: None
    """
    for item in self:
        command_name = getattr(item, prop, '').strip()
        if not command_name:
            # No defined command
            setattr(item, prop, None)
        else:
            # Replace the raw command string with a resolved command call.
            setattr(item, prop, self.create_commandcall(item, commands, command_name))
|
def convert_logistic_regression_output(node, **kwargs):
    """Map MXNet's LogisticRegressionOutput operator to ONNX's Sigmoid
    operator and return the created node.

    (FIX: the previous docstring claimed SoftmaxOutput -> Softmax, but the
    code emits a Sigmoid node, which matches logistic-regression output.)

    :param node: MXNet symbol node dict (uses "name" and "inputs").
    :param kwargs: conversion context; uses "index_lookup" to map the MXNet
        input id to an index into "proc_nodes" (already-converted nodes).
    :return: single-element list containing the ONNX Sigmoid node.
    """
    name = node["name"]
    # Resolve this node's single input through the index lookup table.
    input1_idx = kwargs["index_lookup"][node["inputs"][0][0]]
    input1 = kwargs["proc_nodes"][input1_idx]
    sigmoid_node = onnx.helper.make_node("Sigmoid", [input1.name], [name], name=name)
    return [sigmoid_node]
|
def from_payload(self, payload):
    """Init frame from binary data."""
    # Big-endian 16-bit session id from the first two bytes.
    self.session_id = (payload[0] << 8) + payload[1]
    self.index_id = payload[2]
    self.node_parameter = payload[3]
    # Big-endian 16-bit seconds value from bytes 4-5.
    self.seconds = (payload[4] << 8) + payload[5]
|
def system(self) -> 'EFBChat':
    """Set the chat as a system chat.

    Only set for channel-level and group-level system chats.

    Returns:
        EFBChat: This object.
    """
    # Overwrite the identity fields with the well-known system values
    # and return self so calls can be chained.
    self.chat_name = "System"
    self.chat_alias = None
    self.chat_uid = EFBChat.SYSTEM_ID
    self.chat_type = ChatType.System
    return self
|
def unique_dimkeys(obj, default_dim='Frame'):
    """Finds all common dimension keys in the object including subsets of
    dimensions. If there are is no common subset of dimensions, None
    is returned.

    Returns the list of dimensions followed by the list of unique
    keys.
    """
    # Local imports avoid a circular dependency between core modules.
    from .ndmapping import NdMapping, item_check
    from .spaces import HoloMap
    # Collect (kdims, keys) for every HoloMap in the object tree.
    key_dims = obj.traverse(lambda x: (tuple(x.kdims), list(x.data.keys())), (HoloMap,))
    if not key_dims:
        # No HoloMaps at all: fall back to a single default dimension/key.
        return [Dimension(default_dim)], [(0,)]
    # Sort groups by decreasing number of dimensions, then separate the
    # dimension groups from their corresponding key lists.
    dim_groups, keys = zip(*sorted(key_dims, key=lambda x: -len(x[0])))
    dgroups = [frozenset(d.name for d in dg) for dg in dim_groups]
    # True when every pair of dimension groups is subset-ordered, i.e. the
    # groups form a chain under set inclusion.
    subset = all(g1 <= g2 or g1 >= g2 for g1 in dgroups for g2 in dgroups)
    # Find unique keys
    if subset:
        dims = merge_dimensions(dim_groups)
        # Order merged dims by their position in the largest group.
        all_dims = sorted(dims, key=lambda x: dim_groups[0].index(x))
    else:
        # Handle condition when HoloMap/DynamicMap dimensions do not overlap
        hmaps = obj.traverse(lambda x: x, ['HoloMap'])
        if hmaps:
            raise ValueError('When combining HoloMaps into a composite plot '
                             'their dimensions must be subsets of each other.')
        # Non-overlapping (DynamicMap) case: take the last-seen key value
        # for each dimension name and build a single combined key.
        dimensions = merge_dimensions(dim_groups)
        dim_keys = {}
        for dims, keys in key_dims:
            for key in keys:
                for d, k in zip(dims, key):
                    dim_keys[d.name] = k
        if dim_keys:
            keys = [tuple(dim_keys.get(dim.name) for dim in dimensions)]
        else:
            keys = []
        return merge_dimensions(dim_groups), keys
    # Subset case continues here: pad each group's keys to the full
    # dimensionality and deduplicate.
    ndims = len(all_dims)
    unique_keys = []
    for group, keys in zip(dim_groups, keys):
        dim_idxs = [all_dims.index(dim) for dim in group]
        for key in keys:
            padded_key = create_ndkey(ndims, dim_idxs, key)
            # A candidate matches an existing key when it agrees on every
            # non-None position.
            matches = [item for item in unique_keys
                       if padded_key == tuple(k if k is None else i
                                              for i, k in zip(item, padded_key))]
            if not matches:
                unique_keys.append(padded_key)
    # Sort the padded keys via an NdMapping (item checking disabled since
    # the values are just placeholders).
    with item_check(False):
        sorted_keys = NdMapping({key: None for key in unique_keys}, kdims=all_dims).data.keys()
    return all_dims, list(sorted_keys)
|
def to_tf_matrix(expression_matrix, gene_names, tf_names):
    """Subset the expression matrix to the transcription-factor columns.

    :param expression_matrix: numpy matrix. Rows are observations and columns are genes.
    :param gene_names: a list of gene names. Each entry corresponds to the expression_matrix column with same index.
    :param tf_names: a list of transcription factor names. Should be a subset of gene_names.
    :return: tuple of:
        0: A numpy matrix representing the predictor matrix for the regressions.
        1: The gene names corresponding to the columns in the predictor matrix.
    """
    # PERF: membership testing against a set is O(1) per gene; the original
    # scanned the tf_names list once per gene (O(genes * tfs)).
    tf_set = set(tf_names)
    tuples = [(index, gene) for index, gene in enumerate(gene_names) if gene in tf_set]
    tf_indices = [t[0] for t in tuples]
    tf_matrix_names = [t[1] for t in tuples]
    # Integer-array indexing selects the TF columns in gene_names order.
    return expression_matrix[:, tf_indices], tf_matrix_names
|
def check_events(self):
    '''check for events, calling registered callbacks as needed'''
    # Drain the event queue; each event is fanned out to every callback.
    while self.event_count() > 0:
        evt = self.get_event()
        for cb in self._callbacks:
            cb(evt)
|
def run(self):
    """Parse the script file.

    :rtype: :py:class:`~turberfield.dialogue.model.Model`
    """
    # Build a Model over the script path and document, then let the
    # document tree drive it via walkabout.
    parsed = Model(self.fP, self.doc)
    self.doc.walkabout(parsed)
    return parsed
|
def on_data(self, raw_data):
    """Called when raw data is received from connection.

    Override this method if you wish to manually handle
    the stream data. Return False to stop stream and close connection.
    """
    data = json.loads(raw_data)
    message_type = data['meta'].get('type')
    # Dispatch by message type: prepare_<type> massages the payload into
    # args, on_<type> consumes them; both fall back to generic handlers.
    preparer = getattr(self, 'prepare_%s' % (message_type), self.prepare_fallback)
    args = preparer(data.get('data'))
    handler = getattr(self, 'on_%s' % (message_type,), self.on_fallback)
    handler(*args, meta=StreamingMeta.from_response_data(data.get('meta'), self.api))
|
def num_lines(self):
    """Lazy evaluation of the number of lines.

    Returns None for stdin input currently.
    """
    if self.from_stdin:
        return None
    # Cached from a previous call?
    if self._num_lines:
        return self._num_lines
    # First use: count the lines, which populates self._num_lines.
    self._iterate_lines()
    return self._num_lines
|
def _setup_xauth(self):
    '''Set up the Xauthority file and the XAUTHORITY environment variable.'''
    # Create a private Xauthority file; only the path is needed afterwards,
    # so the open handle is closed immediately.
    handle, filename = tempfile.mkstemp(prefix='PyVirtualDisplay.', suffix='.Xauthority')
    self._xauth_filename = filename
    os.close(handle)
    # Save old environment
    # so it can be restored later when the display is torn down.
    self._old_xauth = {}
    # NOTE(review): 'AUTHFILE' is saved and set alongside 'XAUTHORITY';
    # X clients standardly read XAUTHORITY -- confirm anything actually
    # consumes AUTHFILE before relying on it.
    self._old_xauth['AUTHFILE'] = os.getenv('AUTHFILE')
    self._old_xauth['XAUTHORITY'] = os.getenv('XAUTHORITY')
    os.environ['AUTHFILE'] = os.environ['XAUTHORITY'] = filename
    # Generate a cookie and register it for the new display.
    cookie = xauth.generate_mcookie()
    xauth.call('add', self.new_display_var, '.', cookie)
|
def find_max_neg(input_list: list) -> int:
    """Return the largest (closest to zero) negative number in the list.

    Args:
        input_list: A list of integers

    Returns:
        The largest negative number in the list, or None if the list
        contains no negative numbers.

    Examples:
        >>> find_max_neg([1, 2, 3, -4, -6])
        -4
        >>> find_max_neg([1, 2, 3, -8, -9])
        -8
        >>> find_max_neg([1, 2, 3, 4, -1])
        -1
    """
    # BUG FIX: the original tracked the overall MINIMUM of the list (and
    # seeded it with input_list[0], which may be positive), so it returned
    # e.g. -6 instead of -4 for [1, 2, 3, -4, -6].  The largest negative
    # is the maximum over the negative elements only.
    largest_negative = None
    for num in input_list:
        if num < 0 and (largest_negative is None or num > largest_negative):
            largest_negative = num
    return largest_negative
|
def create_app(name, config=None, flask_params=None):
    """Create app

    Generalized way of creating a flask app. Use it in your concrete apps and
    do further configuration there: add app-specific options, extensions,
    listeners and other features.

    Note: application name should be its fully qualified __name__, something
    like project.api.app. This is how we fetch routing settings.
    """
    # Local import keeps boiler's config out of module import time.
    from boiler.config import DefaultConfig
    if config is None:
        config = DefaultConfig()
    # get flask parameters
    options = dict(import_name=name)
    if flask_params is not None:
        options.update(flask_params)
    # Static URL/path overrides from config take precedence.
    if config.get('FLASK_STATIC_URL') is not None:
        options['static_url_path'] = config.get('FLASK_STATIC_URL')
    if config.get('FLASK_STATIC_PATH') is not None:
        options['static_folder'] = config.get('FLASK_STATIC_PATH')
    # create an app
    app = Flask(**options)
    # configure app: passing a config CLASS (rather than an instance) is a
    # usage error -- reject it explicitly.
    if config.__class__ is type:
        raise Exception('Config must be an object, got class instead.')
    # Apply defaults first, then let the caller's config override them.
    app.config.from_object(DefaultConfig())
    app.config.from_object(config)
    # register error handler
    register_error_handler(app)
    # use kernel templates: fall back to boiler's bundled templates when a
    # template is missing from the app's own loader.
    kernel_templates_path = path.realpath(path.dirname(__file__) + '/templates')
    fallback_loader = FileSystemLoader([kernel_templates_path])
    custom_loader = ChoiceLoader([app.jinja_loader, fallback_loader])
    app.jinja_loader = custom_loader
    # time restarts?
    if app.config.get('TIME_RESTARTS'):
        restart_timer.time_restarts(os.path.join(os.getcwd(), 'var', 'data'))
    # detect browsersync proxy: flag requests that arrived via the dev
    # proxy, based on the configured header.
    @app.before_request
    def detect_browsersync():
        g.dev_proxy = False
        proxy_header = app.config.get('DEV_PROXY_HEADER')
        if proxy_header:
            g.dev_proxy = bool(request.headers.get(proxy_header))
    return app
|
def start(self):
    """Create a background thread for httpd and serve 'forever'"""
    # Run the server loop off the main thread so start() returns promptly.
    worker = threading.Thread(target=self._background_runner)
    self._process = worker
    worker.start()
|
def Clone(self):
    """Clone self.

    Returns:
        AccountState:
    """
    # Build a fresh AccountState carrying over this account's fields.
    duplicate = AccountState(self.ScriptHash, self.IsFrozen, self.Votes, self.Balances)
    return duplicate
|
def serve_http(self):
    """serve_http serves the Prometheus endpoint."""
    # Bind the exporter's HTTP server to the configured address and port.
    addr = str(self.options.address)
    start_http_server(port=self.options.port, addr=addr)
|
def fen(self, *, shredder: bool = False, en_passant: str = "legal", promoted: Optional[bool] = None) -> str:
    """Gets a FEN representation of the position.

    A FEN string (e.g.,
    ``rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1``) consists
    of the position part :func:`~chess.Board.board_fen()`, the
    :data:`~chess.Board.turn`, the castling part
    (:data:`~chess.Board.castling_rights`),
    the en passant square (:data:`~chess.Board.ep_square`),
    the :data:`~chess.Board.halfmove_clock`
    and the :data:`~chess.Board.fullmove_number`.

    :param shredder: Use :func:`~chess.Board.castling_shredder_fen()`
        and encode castling rights by the file of the rook
        (like ``HAha``) instead of the default
        :func:`~chess.Board.castling_xfen()` (like ``KQkq``).
    :param en_passant: By default, only fully legal en passant squares
        are included (:func:`~chess.Board.has_legal_en_passant()`).
        Pass ``fen`` to strictly follow the FEN specification
        (always include the en passant square after a two-step pawn move)
        or ``xfen`` to follow the X-FEN specification
        (:func:`~chess.Board.has_pseudo_legal_en_passant()`).
    :param promoted: Mark promoted pieces like ``Q~``. By default, this is
        only enabled in chess variants where this is relevant.
    """
    # FEN = EPD fields + halfmove clock + fullmove number, space-separated.
    epd_part = self.epd(shredder=shredder, en_passant=en_passant, promoted=promoted)
    return "{} {} {}".format(epd_part, self.halfmove_clock, self.fullmove_number)
|
def rotation_matrix(self, angles):
    """Return the rotation matrix to the system state at ``angles``.

    Parameters
    ----------
    angles : `array-like` or sequence
        Euler angles in radians describing the rotation of the detector.
        The length of the provided argument (along the first axis in
        case of an array) must be equal to the number of Euler angles
        in this geometry.

    Returns
    -------
    rot : `numpy.ndarray`
        Rotation matrix (or matrices) mapping vectors at the
        initial state to the ones in the state defined by ``angles``.
        The rotation is extrinsic, i.e., defined in the "world"
        coordinate system.
        If ``angles`` is a single pair (or triplet) of Euler angles,
        an array of shape ``(3, 3)`` representing a single matrix is
        returned. Otherwise, the shape of the returned array is
        ``broadcast(*angles).shape + (3, 3)``.
    """
    # Scalar input (all angles broadcast to shape ()) gets the singleton
    # axes squeezed off the result below.
    squeeze_out = (np.broadcast(*angles).shape == ())
    # Keep the original value for the error message before reshaping.
    angles_in = angles
    # Promote every angle to at least a 1-d float array.
    angles = tuple(np.array(angle, dtype=float, copy=False, ndmin=1) for angle in angles)
    if (self.check_bounds and not is_inside_bounds(angles, self.motion_params)):
        raise ValueError('`angles` {} not in the valid range '
                         '{}'.format(angles_in, self.motion_params))
    matrix = euler_matrix(*angles)
    if squeeze_out:
        matrix = matrix.squeeze()
    return matrix
|
def addresses_from_address_families(address_mapper, specs):
    """Given an AddressMapper and list of Specs, return matching BuildFileAddresses.

    :raises: :class:`ResolveError` if:
        - there were no matching AddressFamilies, or
        - the Spec matches no addresses for SingleAddresses.
    :raises: :class:`AddressLookupError` if no targets are matched for non-SingleAddress specs.
    """
    # Capture a Snapshot covering all paths for these Specs, then group by directory.
    # (`yield Get(...)` is the engine's coroutine-style request protocol.)
    snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
    dirnames = {dirname(f) for f in snapshot.files}
    address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]
    address_family_by_directory = {af.namespace: af for af in address_families}
    matched_addresses = OrderedSet()
    for spec in specs:
        # NB: if a spec is provided which expands to some number of targets, but those targets match
        # --exclude-target-regexp, we do NOT fail! This is why we wait to apply the tag and exclude
        # patterns until we gather all the targets the spec would have matched without them.
        try:
            addr_families_for_spec = spec.matching_address_families(address_family_by_directory)
        except Spec.AddressFamilyResolutionError as e:
            # Re-raise as the engine-level error, preserving the cause chain.
            raise raise_from(ResolveError(e), e)
        try:
            all_addr_tgt_pairs = spec.address_target_pairs_from_address_families(addr_families_for_spec)
        except Spec.AddressResolutionError as e:
            raise raise_from(AddressLookupError(e), e)
        except SingleAddress._SingleAddressResolutionError as e:
            # Produce a "did you mean" suggestion for near-miss single addresses.
            _raise_did_you_mean(e.single_address_family, e.name, source=e)
        # Apply the tag/exclude matcher only now (see NB above).
        matched_addresses.update(addr for (addr, tgt) in all_addr_tgt_pairs
                                 if specs.matcher.matches_target_address_pair(addr, tgt))
    # NB: This may be empty, as the result of filtering by tag and exclude patterns!
    yield BuildFileAddresses(tuple(matched_addresses))
|
def submit_recording(raw_data_json):
    """Submit a recording to the database on write-math.com.

    Parameters
    ----------
    raw_data_json : str
        Raw data in JSON format

    Raises
    ------
    requests.exceptions.ConnectionError
        If the internet connection is lost.
    """
    url = "http://www.martin-thoma.de/write-math/classify/index.php"
    headers = {'User-Agent': 'Mozilla/5.0',
               'Content-Type': 'application/x-www-form-urlencoded'}
    payload = {'drawnJSON': raw_data_json}
    # Build the POST explicitly as a prepared request, then send it.
    session = requests.Session()
    prepared = requests.Request('POST', url, headers=headers, data=payload).prepare()
    session.send(prepared)
|
def clean_previous_run(self):
    """Clean variables from previous configuration,
    such as schedulers, broks and external commands

    :return: None
    """
    # Clean all lists
    self.arbiters.clear()
    self.schedulers.clear()
    # FIX: the original rebound external_commands to a shallow copy of
    # itself (`self.external_commands[:]`), which keeps every stale command
    # from the previous configuration; "cleaning" must drop them.  The
    # rebind (rather than in-place clear) still decouples any external
    # references to the old list.
    with self.external_commands_lock:
        self.external_commands = []
|
def parse_args_to_action_args(self, argv=None):
    '''Parses args and returns an action and the args that were parsed'''
    parsed = self.parse_args(argv)
    # subcommands maps name -> (metadata, action); the action lives at [1].
    selected_action = self.subcommands[parsed.subcommand][1]
    return selected_action, parsed
|
def get_labels(obj):
    """Retrieve the labels of a clustering.rst object

    :param obj: the clustering.rst object
    :return: the resulting labels
    """
    # pyclustering models keep their labels on the wrapper object;
    # sklearn-style models expose them as `labels_`.
    if not Clustering.is_pyclustering_instance(obj.model):
        return obj.model.labels_
    return obj._labels_from_pyclusters
|
def get_config():
    """Get the configuration from file"""
    # Prefer an explicitly configured file; fall back to the built-in one.
    configfile = CONFIG_FILE if CONFIG_FILE is not None else BUILTIN_CONFIG_FILE
    config = {}
    with open(configfile, 'r') as fp_:
        config = recursive_dict_update(config, yaml.load(fp_, Loader=UnsafeLoader))
    # Default data directories live under the per-user pyspectral app dir.
    user_datadir = AppDirs('pyspectral', 'pytroll').user_data_dir
    config['rsr_dir'] = expanduser(config.get('rsr_dir', user_datadir))
    config['rayleigh_dir'] = expanduser(config.get('rayleigh_dir', user_datadir))
    return config
|
def fc_to_features(dataset):
    """converts a dataset to a list of feature objects

    Input:
        dataset - path to table or feature class
    Output:
        list of feature objects
    """
    # Without arcpy there is nothing to read; returns None at the bottom.
    if arcpyFound:
        desc = arcpy.Describe(dataset)
        # All non-geometry field names; geometry is fetched separately via
        # the SHAPE@JSON token appended below.
        fields = [field.name for field in arcpy.ListFields(dataset) if field.type not in ['Geometry']]
        date_fields = [field.name for field in arcpy.ListFields(dataset) if field.type == 'Date']
        # Keep a copy of the attribute-only field list before SHAPE@JSON is
        # appended, so attribute zipping stays aligned.
        non_geom_fields = copy.deepcopy(fields)
        features = []
        if hasattr(desc, "shapeFieldName"):
            fields.append("SHAPE@JSON")
        del desc
        with arcpy.da.SearchCursor(dataset, fields) as rows:
            for row in rows:
                row = list(row)
                # Convert date values to integer timestamps for JSON.
                for df in date_fields:
                    if row[fields.index(df)] != None:
                        row[fields.index(df)] = int((_date_handler(row[fields.index(df)])))
                template = {"attributes": dict(zip(non_geom_fields, row))}
                if "SHAPE@JSON" in fields:
                    # Geometry is the last cursor column; parse its JSON.
                    template['geometry'] = json.loads(row[fields.index("SHAPE@JSON")])
                features.append(Feature(json_string=_unicode_convert(template)))
                del row
        return features
    return None
|
def lambda_handler(event, context):
    '''Demonstrates a simple HTTP endpoint using API Gateway. You have full
    access to the request and response payload, including headers and
    status code.

    To scan a DynamoDB table, make a GET request with the TableName as a
    query string parameter. To put, update, or delete an item, make a POST,
    PUT, or DELETE request respectively, passing in the payload to the
    DynamoDB API as a JSON body.
    '''
    # print("Received event: " + json.dumps(event, indent=2))
    operations = {
        'DELETE': lambda dynamo, x: dynamo.delete_item(**x),
        'GET': lambda dynamo, x: dynamo.scan(**x),
        'POST': lambda dynamo, x: dynamo.put_item(**x),
        'PUT': lambda dynamo, x: dynamo.update_item(**x),
    }
    method = event['httpMethod']
    handler = operations.get(method)
    if handler is None:
        return respond(ValueError('Unsupported method "{}"'.format(method)))
    # GET takes its payload from the query string; others from the body
    if method == 'GET':
        payload = event['queryStringParameters']
    else:
        payload = json.loads(event['body'])
    return respond(None, handler(dynamo, payload))
|
def FDMT_initialization(datain, f_min, f_max, maxDT, dataType):
    """Input: datain - visibilities of (nint, nbl, nchan, npol)
    f_min, f_max - the base-band begin and end frequencies.
        The frequencies can be entered in both MHz and GHz, units are
        factored out in all uses.
    maxDT - the maximal delay (in time bins) of the maximal dispersion.
        Appears in the paper as N_{\\Delta}. A typical input is maxDT = N_f.
    dataType - To naively use FFT, one must use floating point types.
        Due to casting, use either complex64 or complex128.
    Output: dataout, 5d array with dimensions [nint, N_d0, nbl, nchan, npol]
        where N_d0 is the maximal number of bins the dispersion curve
        travels at one frequency bin.
    For details, see algorithm 1 in Zackay & Ofek (2014).
    """
    # Data initialization is done prior to the first FDMT iteration
    # See Equations 17 and 19 in Zackay & Ofek (2014)
    [nint, nbl, nchan, npol] = datain.shape
    deltaF = (f_max - f_min) / float(nchan)
    deltaT = int(np.ceil((maxDT - 1) * (1. / f_min ** 2 - 1. / (f_min + deltaF) ** 2)
                         / (1. / f_min ** 2 - 1. / f_max ** 2)))
    dataout = np.zeros([nint, deltaT + 1, nbl, nchan, npol], dataType)
    dataout[:, 0, :, :, :] = datain
    # ``range`` instead of the Python-2-only ``xrange`` (identical
    # iteration semantics, works on both Python 2 and 3).
    for i_dT in range(1, deltaT + 1):
        # cumulative sums of progressively time-shifted copies (Eq. 19)
        dataout[i_dT:, i_dT, :, :, :] = dataout[i_dT:, i_dT - 1, :, :, :] + datain[:-i_dT]
    return dataout
|
def create(self):
    """Create the local repository (if it doesn't already exist).

    :returns: :data:`True` if the local repository was just created,
              :data:`False` if it already existed.

    What :func:`create()` does depends on the situation:

    - When :attr:`exists` is :data:`True` nothing is done.
    - When the :attr:`local` repository doesn't exist but a :attr:`remote`
      repository location is given, a clone of the remote repository is
      created.
    - When the :attr:`local` repository doesn't exist and no :attr:`remote`
      repository has been specified then a new local repository will be
      created.

    When :func:`create()` is responsible for creating the :attr:`local`
    repository it will make sure the :attr:`bare` option is respected.
    """
    # Guard clause: nothing to do when the repository already exists.
    if self.exists:
        logger.debug("Local %s repository (%s) already exists, ignoring request to create it.",
                     self.friendly_name, format_path(self.local))
        return False
    timer = Timer()
    if self.remote:
        logger.info("Creating local %s repository (%s) by cloning %s ..",
                    self.friendly_name, format_path(self.local), self.remote)
    else:
        logger.info("Creating local %s repository (%s) ..",
                    self.friendly_name, format_path(self.local))
    self.context.execute(*self.get_create_command())
    logger.debug("Took %s to %s local %s repository.",
                 timer, "clone" if self.remote else "create", self.friendly_name)
    if self.remote:
        self.mark_updated()
    # Ensure that all further commands are executed in the local repository.
    self.update_context()
    return True
|
def get_traceback_stxt():
    """Return the traceback of the exception currently being handled.

    Result is (bytes) str type on Python 2 and (unicode) str type on
    Python 3.
    """
    return ''.join(traceback.format_exception(*sys.exc_info()))
|
def _compile ( cls , lines ) :
    '''Return the filename from the current line.

    Matches ``cls.RE_EXTEND`` against the current line and returns the
    first capture group (the path of the extended template).

    :param lines: line stream exposing ``current`` (text) and ``pos``
        (line number).
    :raises DefineBlockError: if the current line is not a valid
        ``#extend`` directive.
    '''
    m = cls . RE_EXTEND . match ( lines . current )
    if m is None :
        raise DefineBlockError ( '''Incorrect block definition at line {}, {}
Should be something like: #extend path/foo.html:''' . format ( lines . pos , lines . current ) )
    return m . group ( 1 )
|
def record(file_path, topic_names=(), host=jps.env.get_master_host(), sub_port=jps.DEFAULT_SUB_PORT):
    '''record the topic data to the file

    :param file_path: path of the JSON file to write.
    :param topic_names: topics to record; an empty sequence records all
        topics.  The default is an immutable tuple — the original mutable
        list default was a shared-state anti-pattern.
    :param host: master host to subscribe to (evaluated at import time).
    :param sub_port: subscriber port.
    '''

    class TopicRecorder(object):
        """Writes a JSON header then streams entries into a "data" array."""

        def __init__(self, file_path, topic_names):
            self._topic_names = topic_names
            self._file_path = file_path
            self._output = open(self._file_path, 'w')
            # close the file cleanly on Ctrl-C / termination
            signal.signal(signal.SIGINT, self._handle_signal)
            signal.signal(signal.SIGTERM, self._handle_signal)
            header = {}
            header['topic_names'] = topic_names
            header['start_date'] = str(datetime.datetime.today())
            header_string = json.dumps({'header': header})
            # drop the closing '}' so the "data" array can be appended
            tail_removed_header = header_string[0:-1]
            self._output.write(tail_removed_header + ',\n')
            self._output.write(' "data": [\n')
            self._has_no_data = True

        def callback(self, msg, topic):
            """Record one message if its topic is selected."""
            if self._output.closed:
                return
            raw_msg = '{topic} {msg}'.format(topic=topic, msg=msg)
            if not self._topic_names or topic in self._topic_names:
                # comma-separate all entries after the first one
                if not self._has_no_data:
                    self._output.write(',\n')
                else:
                    self._has_no_data = False
                self._output.write(json.dumps([time.time(), raw_msg]))

        def close(self):
            """Terminate the JSON document and close the file (idempotent)."""
            if not self._output.closed:
                self._output.write('\n]}')
                self._output.close()

        def _handle_signal(self, signum, frame):
            self.close()
            sys.exit(0)

    writer = TopicRecorder(file_path, topic_names)
    sub = jps.Subscriber('*', writer.callback, host=host, sub_port=sub_port)
    sub.spin()
    writer.close()
|
def _is_entity(bpe):
    """Return True if the element is a physical entity."""
    # Same class checks as before, in the same order, expressed as a loop
    # over the entity class names instead of one giant boolean expression.
    entity_names = ('Protein', 'SmallMolecule', 'Complex', 'Rna',
                    'RnaRegion', 'DnaRegion', 'PhysicalEntity')
    return any(isinstance(bpe, _bp(name)) or isinstance(bpe, _bpimpl(name))
               for name in entity_names)
|
def saveInputsToFile(self, filename):
    """Deprecated. Write each input pattern to *filename*, one per line.

    Each input is passed through ``self.replacePatterns`` and its items
    are written as space-separated floats.

    :param filename: path of the output file (overwritten).
    """
    # Use a context manager so the file is flushed and closed even on
    # error — the original implementation never closed the handle.
    with open(filename, 'w') as fp:
        for input_pattern in self.inputs:
            vec = self.replacePatterns(input_pattern)
            for item in vec:
                fp.write("%f " % item)
            fp.write("\n")
|
def append_dictionary_to_file(localization_key_to_comment, file_path, section_name):
    """Appends dictionary of localization keys and comments to a file.

    Args:
        localization_key_to_comment (dict): A mapping between localization keys and comments.
        file_path (str): The path of the file to append to.
        section_name (str): The name of the section.
    """
    output_file = open_strings_file(file_path, "a")
    try:
        write_section_header_to_file(output_file, section_name)
        # ``items()`` instead of the Python-2-only ``iteritems()``;
        # entries remain sorted by their comment text (the value).
        for entry_key, entry_comment in sorted(localization_key_to_comment.items(),
                                               key=operator.itemgetter(1)):
            output_file.write(u'\n')
            write_entry_to_file(output_file, entry_comment, entry_key)
    finally:
        # release the handle even when a write fails (original leaked it
        # on error)
        output_file.close()
|
def _to_dict ( self ) :
"""Return a json dictionary representing this model ."""
|
_dict = { }
if hasattr ( self , 'score' ) and self . score is not None :
_dict [ 'score' ] = self . score
if hasattr ( self , 'confidence' ) and self . confidence is not None :
_dict [ 'confidence' ] = self . confidence
return _dict
|
def _actionsiter ( self , message_iterator ) :
    """Iterate bulk actions.

    Yields one bulk action (delete or index) per successfully processed
    queue message; messages are acknowledged only after their action was
    produced, and rejected on failure.

    :param message_iterator: Iterator yielding messages from a queue.
    """
    for message in message_iterator :
        payload = message . decode ( )
        try :
            if payload [ 'op' ] == 'delete' :
                yield self . _delete_action ( payload )
            else :
                yield self . _index_action ( payload )
            # ack only after the action was produced without error
            message . ack ( )
        except NoResultFound :
            # the referenced record no longer exists; drop the message
            message . reject ( )
        except Exception :
            # unexpected failure: reject and log with full traceback
            message . reject ( )
            current_app . logger . error ( "Failed to index record {0}" . format ( payload . get ( 'id' ) ) , exc_info = True )
|
def get_parameters(self, regex_exp, parameters):
    """Given a regex expression and the string with the parameters,
    either return the matched groups or raise an exception if the regex
    did not find a match.

    :param regex_exp: regular expression the parameter string must match.
    :param parameters: raw parameter string (equates are substituted).
    :return: tuple of captured groups.
    :raises iarm.exceptions.ParsingError: when the regex does not match.
    """
    # TODO find a better way to do the equate replacement
    for symbol, value in self.equates.items():
        parameters = parameters.replace(symbol, str(value))
    match = re.match(regex_exp, parameters)
    if match is None:
        raise iarm.exceptions.ParsingError("Parameters are None, did you miss a comma?")
    return match.groups()
|
def __send_retry_requests ( self , last_send_failure_time ) :
    """Called via Timer from __send_ready to resend requests which might not have been sent due to transport
    failure. This can happen since the current transport implementation does not receive acknowledgements
    for sent messages.

    :param last_send_failure_time: timestamp of the transport failure
        that scheduled this retry pass; only requests sent before it
        (and still without a response) are resent.
    """
    # make sure multiple failures having set multiple times do not run concurrently
    with self . __send_retry_requests_lock :
        with self . __requests : # produce list instead of generator as requests mapping can change during subsequent loop
            retry_reqs = [ req for req in self . __requests . values ( ) if req . _sent_without_response ( last_send_failure_time ) ]
        retry_req_count = 0
        # don't continue if another network failure has occured (which will trigger this function again)
        while retry_reqs and self . __amqplink . last_send_exc_time <= last_send_failure_time :
            req = retry_reqs . pop ( )
            # lock individuallly so incoming request handling does not 'pause' for too long
            with self . __requests : # might have received a response (or finished since)
                if not ( req . id_ in self . __requests and req . _sent_without_response ( last_send_failure_time ) ) :
                    logger . debug ( 'Not resending request %s (finished or has received response)' , req . id_ )
                    continue
                logger . debug ( 'Resending request %s' , req . id_ )
                # enqueue fails only on client shutdown; abort the pass
                if not self . __retry_enqueue ( PreparedMessage ( req . _inner_msg_out , req . id_ ) ) : # client shutdown
                    break
                retry_req_count += 1
        if retry_req_count :
            logger . debug ( 'Resending of %d request(s) complete (before %s)' , retry_req_count , last_send_failure_time )
|
def wait_on_job(self, job_id):
    """Poll task status until STOPPED.

    :param job_id: AWS Batch job identifier to poll.
    :return: True once the job reaches SUCCEEDED.
    :raises BatchJobException: when the job reaches FAILED; the message
        includes the job's container logs.
    """
    while True:
        status = self.get_job_status(job_id)
        if status == 'SUCCEEDED':
            logger.info('Batch job {} SUCCEEDED'.format(job_id))
            return True
        if status == 'FAILED':
            # Raise and notify if job failed
            jobs = self._client.describe_jobs(jobs=[job_id])['jobs']
            logger.debug('Job details:\n' + json.dumps(jobs, indent=4))
            log_stream_name = jobs[0]['attempts'][0]['container']['logStreamName']
            logs = self.get_logs(log_stream_name)
            raise BatchJobException('Job {} failed: {}'.format(job_id, logs))
        # still running: wait before the next poll
        time.sleep(self.poll_time)
        logger.debug('Batch job status for job {0}: {1}'.format(job_id, status))
|
def expected_cost_for_region ( short_numobj , region_dialing_from ) :
    """Gets the expected cost category of a short number when dialled from a
    region (however, nothing is implied about its validity). If it is
    important that the number is valid, then its validity must first be
    checked using is_valid_short_number_for_region. Note that emergency
    numbers are always considered toll-free.

    Example usage::

        short_number = "110"
        region_code = "FR"
        if phonenumbers.is_valid_short_number_for_region(short_number, region_code):
            cost = phonenumbers.expected_cost(short_number, region_code)  # ShortNumberCost
            # Do something with the cost information here.

    Arguments:
    short_numobj -- the short number for which we want to know the expected cost category
          as a PhoneNumber object.
    region_dialing_from -- the region from which the number is dialed

    Return the expected cost category for that region of the short
    number. Returns UNKNOWN_COST if the number does not match a cost
    category. Note that an invalid number may match any cost category.
    """
    if not _region_dialing_from_matches_number ( short_numobj , region_dialing_from ) :
        return ShortNumberCost . UNKNOWN_COST
    # Note that region_dialing_from may be None, in which case metadata will also be None.
    metadata = PhoneMetadata . short_metadata_for_region ( region_dialing_from )
    if metadata is None : # pragma no cover
        return ShortNumberCost . UNKNOWN_COST
    short_number = national_significant_number ( short_numobj )
    # The possible lengths are not present for a particular sub-type if they match the general
    # description; for this reason, we check the possible lengths against the general description
    # first to allow an early exit if possible.
    if not ( len ( short_number ) in metadata . general_desc . possible_length ) :
        return ShortNumberCost . UNKNOWN_COST
    # The cost categories are tested in order of decreasing expense, since if
    # for some reason the patterns overlap the most expensive matching cost
    # category should be returned.
    if _matches_possible_number_and_national_number ( short_number , metadata . premium_rate ) :
        return ShortNumberCost . PREMIUM_RATE
    if _matches_possible_number_and_national_number ( short_number , metadata . standard_rate ) :
        return ShortNumberCost . STANDARD_RATE
    if _matches_possible_number_and_national_number ( short_number , metadata . toll_free ) :
        return ShortNumberCost . TOLL_FREE
    # Emergency numbers are implicitly toll-free (checked last so explicit
    # categories above take precedence).
    if is_emergency_number ( short_number , region_dialing_from ) : # pragma no cover
        return ShortNumberCost . TOLL_FREE
    return ShortNumberCost . UNKNOWN_COST
|
def series_lstrip ( series , startswith = 'http://' , ignorecase = True ) :
    """Strip a prefix str (`startswith` str) from a `df` column or pd.Series of type str.

    Thin wrapper around ``series_strip`` that strips only from the start
    of each string (``endswith`` is always None).

    :param series: pd.Series of str values.
    :param startswith: prefix to remove (default ``'http://'``).
    :param ignorecase: match the prefix case-insensitively when True.
    :return: the stripped series.
    """
    return series_strip ( series , startswith = startswith , endswith = None , startsorendswith = None , ignorecase = ignorecase )
|
def execute_get(self, resource, **kwargs):
    """Execute an HTTP GET request against the API endpoints.

    This method is meant for internal use.

    :param resource: The last part of the URI
    :param kwargs: Additional query parameters (and optionally headers)
    :return: The HTTP response as JSON or `GhostException` if unsuccessful
    """
    url = '%s/%s' % (self.base_url, resource)
    headers = kwargs.pop('headers', dict())
    headers['Accept'] = 'application/json'
    headers['Content-Type'] = 'application/json'
    if kwargs:
        separator = '&' if '?' in url else '?'
        for key, value in kwargs.items():
            # Fix: use isinstance() so str *subclasses* are treated as
            # scalars too — ``type(value) not in six.string_types`` only
            # matched exact str, so a str subclass was joined char-by-char.
            if hasattr(value, '__iter__') and not isinstance(value, six.string_types):
                url = '%s%s%s=%s' % (url, separator, key, ','.join(value))
            else:
                url = '%s%s%s=%s' % (url, separator, key, value)
            separator = '&'
    if self._access_token:
        headers['Authorization'] = 'Bearer %s' % self._access_token
    else:
        # no token: authenticate with client credentials in the query string
        separator = '&' if '?' in url else '?'
        url = '%s%sclient_id=%s&client_secret=%s' % (url, separator, self._client_id, self._client_secret)
    response = requests.get(url, headers=headers)
    if response.status_code // 100 != 2:
        raise GhostException(response.status_code, response.json().get('errors', []))
    return response.json()
|
def _GetNormalizedTimestamp ( self ) :
    """Retrieves the normalized timestamp.

    Returns:
      decimal.Decimal: normalized timestamp, which contains the number of
          seconds since January 1, 1970 00:00:00 and a fraction of second used
          for increased precision, or None if the normalized timestamp cannot be
          determined.
    """
    # Lazily computed and cached on first access.
    if self . _normalized_timestamp is None :
        if self . _timestamp is not None :
            self . _normalized_timestamp = decimal . Decimal ( self . _timestamp )
            if self . fraction_of_second is not None :
                fraction_of_second = decimal . Decimal ( self . fraction_of_second )
                if self . _precision == dfdatetime_definitions . PRECISION_1_NANOSECOND :
                    fraction_of_second /= self . _NANOSECONDS_PER_SECOND
                else :
                    # NOTE(review): this branch assumes any non-nanosecond
                    # precision uses 100ns units — confirm against the set
                    # of precisions this class supports.
                    fraction_of_second /= self . _100_NANOSECONDS_PER_SECOND
                self . _normalized_timestamp += fraction_of_second
    return self . _normalized_timestamp
|
def Dictionary(self):
    """Emulates the items() method of dictionaries."""
    # start from a copy of the subject's dictionary, then layer the
    # overrides on top
    base = self.__dict__['__subject'].Dictionary().copy()
    extra = self.__dict__['overrides']
    base.update(extra)
    return base
|
def generate_hash(data: dict, token: str) -> str:
    """Generate secret hash.

    :param data: parameters to sign (the ``hash`` key itself is skipped).
    :param token: secret token; its SHA-256 digest is the HMAC key.
    :return: hex-encoded HMAC-SHA256 of the sorted ``key=value`` lines.
    """
    key = hashlib.sha256(token.encode('utf-8')).digest()
    # keys in ascending order, one "k=v" pair per line, 'hash' excluded
    lines = ("{}={}".format(k, data[k]) for k in sorted(data) if k != 'hash')
    msg = '\n'.join(lines)
    return hmac.new(key, msg.encode('utf-8'), digestmod=hashlib.sha256).hexdigest()
|
def make_environment_relocatable(home_dir):
    """Makes the already-existing environment use relative paths, and takes out
    the #!-based environment selection in scripts.

    :param home_dir: root directory of the virtualenv to fix up.
    """
    marker_script = os.path.join(home_dir, 'bin', 'activate_this.py')
    # logger.fatal only logs; processing deliberately continues afterwards
    if not os.path.exists(marker_script):
        logger.fatal('The environment doesn\'t have a file %s -- please re-run virtualenv '
                     'on this environment to update it' % marker_script)
    fixup_scripts(home_dir)
    fixup_pth_and_egg_link(home_dir)
|
def one_of(*args):
    """Validates that a field value matches one of the values
    given to this validator.

    Accepts either a single list argument or the allowed values as
    separate positional arguments.

    :return: a validator callable returning None on success, or the error
        produced by ``e()`` when the value is not allowed.
    """
    if len(args) == 1 and isinstance(args[0], list):
        items = args[0]
    else:
        items = list(args)

    def validate(value):
        # idiomatic ``not in`` (the original ``not value in`` parses the
        # same but reads ambiguously)
        if value not in items:
            return e("{} is not in the list {}", value, items)
        return None

    return validate
|
def remove_custom_css(destdir, resource=PKGNAME):
    """Remove the kernel CSS from custom.css.

    Rewrites ``<destdir>/custom.css`` without the section delimited by the
    resource's START/END marker lines.

    :param destdir: directory holding ``custom.css``.
    :param resource: resource name used to build the frame markers.
    :return: True when a marker section was found (and removed).
    """
    # Remove the inclusion in the main CSS
    if not os.path.isdir(destdir):
        return False
    custom = os.path.join(destdir, 'custom.css')
    prefix = css_frame_prefix(resource)
    keep, found = True, False
    with io.open(custom + '-new', 'wt') as fout, io.open(custom) as fin:
        for line in fin:
            if line.startswith(prefix + 'START'):
                # stop copying until the matching END marker
                keep, found = False, True
            elif line.startswith(prefix + 'END'):
                keep = True
            elif keep:
                fout.write(line)
    if found:
        os.rename(custom + '-new', custom)
    else:
        # nothing removed: discard the scratch file
        os.unlink(custom + '-new')
    return found
|
def todatetime(self):
    """Converts the current instance to the python builtins :py:class:`datetime.datetime` instance.

    :return: the new :py:class:`datetime.datetime` instance representing the current date and time in gregorian calendar.
    :rtype: :py:class:`datetime.datetime`
    """
    gregorian = get_gregorian_date_from_julian_day(self.tojulianday())
    year, month, day = (int(part) for part in gregorian[:3])
    return datetime(year, month, day,
                    self.hour, self.minute, self.second,
                    self.microsecond, self.tzinfo)
|
def run0 ( self ) :
    """Run one item (a callback or an RPC wait_any).

    Returns:
      A time to sleep if something happened (may be 0);
      None if all queues are empty.
    """
    # 1) Immediately-ready callbacks take priority.
    if self . current :
        self . inactive = 0
        callback , args , kwds = self . current . popleft ( )
        _logging_debug ( 'nowevent: %s' , callback . __name__ )
        callback ( * args , ** kwds )
        return 0
    # 2) Idle handlers run when nothing is immediately ready.
    if self . run_idle ( ) :
        return 0
    delay = None
    # 3) Timed events: fire the earliest one if its time has come.
    if self . queue :
        delay = self . queue [ 0 ] [ 0 ] - self . clock . now ( )
        if delay <= 0 :
            self . inactive = 0
            _ , callback , args , kwds = self . queue . pop ( 0 )
            _logging_debug ( 'event: %s' , callback . __name__ )
            callback ( * args , ** kwds )
            # TODO: What if it raises an exception?
            return 0
    # 4) Otherwise block on any outstanding RPC.
    if self . rpcs :
        self . inactive = 0
        rpc = datastore_rpc . MultiRpc . wait_any ( self . rpcs )
        if rpc is not None :
            _logging_debug ( 'rpc: %s.%s' , rpc . service , rpc . method )
            # Yes, wait_any() may return None even for a non-empty argument.
            # But no, it won't ever return an RPC not in its argument.
            if rpc not in self . rpcs :
                raise RuntimeError ( 'rpc %r was not given to wait_any as a choice %r' % ( rpc , self . rpcs ) )
            callback , args , kwds = self . rpcs [ rpc ]
            del self . rpcs [ rpc ]
            if callback is not None :
                callback ( * args , ** kwds )
            # TODO: Again, what about exceptions?
        return 0
    # Nothing ran: tell the caller how long until the next timed event
    # (None when every queue is empty).
    return delay
|
def cwt(ts, freqs=np.logspace(0, 2), wavelet=cwtmorlet, plot=True):
    """Continuous wavelet transform.

    Note the full results can use a huge amount of memory at 64-bit precision.

    Args:
      ts: Timeseries of m variables, shape (n, m). Assumed constant timestep.
      freqs: list of frequencies (in Hz) to use for the transform.
        (default is 50 frequency bins logarithmic from 1Hz to 100Hz)
      wavelet: the wavelet to use. may be complex. see scipy.signal.wavelets
      plot: whether to plot time-resolved power spectrum

    Returns:
      coefs: Continuous wavelet transform output array, shape (n, len(freqs), m)
    """
    orig_ndim = ts.ndim
    # ``==`` not ``is``: identity comparison with int literals is
    # implementation-dependent (SyntaxWarning on modern CPython).
    if ts.ndim == 1:
        ts = ts[:, np.newaxis]
    channels = ts.shape[1]
    fs = (len(ts) - 1.0) / (1.0 * ts.tspan[-1] - ts.tspan[0])
    x = signal.detrend(ts, axis=0)
    dtype = wavelet(fs / freqs[0], fs / freqs[0]).dtype
    coefs = np.zeros((len(ts), len(freqs), channels), dtype)
    for i in range(channels):
        # Fix: honor the ``wavelet`` argument — the original hard-coded
        # cwtmorlet here, silently ignoring the parameter.
        coefs[:, :, i] = roughcwt(x[:, i], wavelet, fs / freqs).T
    if plot:
        _plot_cwt(ts, coefs, freqs)
    if orig_ndim == 1:
        coefs = coefs[:, :, 0]
    return coefs
|
def get_parser(self, prog_name):
    """Override to add command options.

    :param prog_name: program name shown in usage/help output.
    :return: an ``argparse.ArgumentParser`` without the default -h/--help.
    """
    return argparse.ArgumentParser(
        description=self.get_description(),
        prog=prog_name,
        add_help=False,
    )
|
def config_filter_lines ( parent_regex , child_regex , source = 'running' ) :
    r'''.. versionadded:: 2019.2.0

    Return a list of detailed matches, for the configuration blocks (parent-child
    relationship) whose parent respects the regular expressions configured via
    the ``parent_regex`` argument, and the child matches the ``child_regex``
    regular expression. The result is a list of dictionaries with the following
    keys:

    - ``match``: a boolean value that tells whether ``child_regex`` matched any
      children lines.
    - ``parent``: the parent line (as text).
    - ``child``: the child line (as text). If no child line matched, this field
      will be ``None``.

    .. note::
        This function is only available only when the underlying library
        `ciscoconfparse <http://www.pennington.net/py/ciscoconfparse/index.html>`_
        is installed. See
        :py:func:`ciscoconfparse module <salt.modules.ciscoconfparse_mod>` for
        more details.

    parent_regex
        The regular expression to match the parent configuration lines against.

    child_regex
        The regular expression to match the child configuration lines against.

    source: ``running``
        The configuration type to retrieve from the network device. Default:
        ``running``. Available options: ``running``, ``startup``, ``candidate``.

    CLI Example:

    .. code-block:: bash

        salt '*' napalm.config_filter_lines '^interface' 'ip address'
        salt '*' napalm.config_filter_lines '^interface' 'shutdown' source=candidate
    '''
    # fetch the requested config text from the device, then delegate the
    # parent/child filtering to the ciscoconfparse execution module
    config_txt = __salt__ [ 'net.config' ] ( source = source ) [ 'out' ] [ source ]
    return __salt__ [ 'ciscoconfparse.filter_lines' ] ( config = config_txt , parent_regex = parent_regex , child_regex = child_regex )
|
def pypackable(name, pytype, format):
    """Create a "mix-in" class with a python type and a
    Packable with the given struct format.

    :param name: name of the generated class.
    :param pytype: python base type to mix in.
    :param format: struct format string describing the packed layout.
    """
    size, items = _formatinfo(format)
    namespace = {
        '_format_': format,
        '_size_': size,
        '_items_': items,
    }
    # build the class with Packable's own metaclass
    return type(Packable)(name, (pytype, Packable), namespace)
|
def close(self):
    """Close and delete instance.

    Unregisters this instance's callbacks from its domain and, when this
    was the last instance for the domain and ``release_storage`` is set,
    drops the domain's stored data.
    """
    # remove callbacks
    DatastoreLegacy.datastores[self.domain].remove(self)
    # delete data after the last instance is gone
    if self.release_storage and not DatastoreLegacy.datastores[self.domain]:
        del DatastoreLegacy.store[self.domain]
    # NOTE: the original ended with ``del self``, which only unbinds the
    # local name and has no effect on the instance; removed as dead code.
|
def _unset_child ( self , name , child ) :
"""Untie child from parent .
: param name : Child name .
: param child : Parentable object ."""
|
if name not in self . _children or self . _children [ name ] is not child :
msg = 'Child {child} with name "{name}" is not found'
raise ValueError ( msg . format ( child = child , name = name ) )
child . _set_parent ( None )
self . _remove_child ( name , child )
|
def BNF ( ) :
    """Build (once) and return the arithmetic-expression grammar.

    Grammar::

        expop   :: '^'
        multop  :: '*' | '/'
        addop   :: '+' | '-'
        integer :: ['+' | '-'] '0'..'9'+
        atom    :: PI | E | real | fn '(' expr ')' | '(' expr ')'
        factor  :: atom [ expop factor ]*
        term    :: factor [ multop factor ]*
        expr    :: term [ addop term ]*
    """
    global bnf
    # lazily construct the grammar once and cache it in the module global
    if not bnf :
        point = Literal ( "." )
        # use CaselessKeyword for e and pi, to avoid accidentally matching
        # functions that start with 'e' or 'pi' (such as 'exp'); Keyword
        # and CaselessKeyword only match whole words
        e = CaselessKeyword ( "E" )
        pi = CaselessKeyword ( "PI" )
        # ~ fnumber = Combine( Word( "+-"+nums, nums ) +
        # ~ Optional( point + Optional( Word( nums ) ) ) +
        # ~ Optional( e + Word( "+-"+nums, nums ) ) )
        fnumber = Regex ( r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?" )
        ident = Word ( alphas , alphanums + "_$" )
        plus , minus , mult , div = map ( Literal , "+-*/" )
        lpar , rpar = map ( Suppress , "()" )
        addop = plus | minus
        multop = mult | div
        expop = Literal ( "^" )
        expr = Forward ( )
        atom = ( ( 0 , None ) * minus + ( pi | e | fnumber | ident + lpar + expr + rpar | ident ) . setParseAction ( pushFirst ) | Group ( lpar + expr + rpar ) ) . setParseAction ( pushUMinus )
        # by defining exponentiation as "atom [ ^ factor ]..." instead of "atom [ ^ atom ]...", we get right-to-left exponents, instead of left-to-right
        # that is, 2^3^2 = 2^(3^2), not (2^3)^2.
        factor = Forward ( )
        factor << atom + ZeroOrMore ( ( expop + factor ) . setParseAction ( pushFirst ) )
        term = factor + ZeroOrMore ( ( multop + factor ) . setParseAction ( pushFirst ) )
        expr << term + ZeroOrMore ( ( addop + term ) . setParseAction ( pushFirst ) )
        bnf = expr
    return bnf
|
def cisco_conf_parse_parents(parent, child, config):
    """Use CiscoConfParse to find parent lines that contain a specific child line.

    :param parent: The parent line to search for
    :param child: The child line required under the given parent
    :param config: The device running/startup config (str or list of lines)
    :return: list of matching parent lines
    """
    # isinstance() instead of ``type(config) == str`` so str subclasses
    # are also split into lines.
    if isinstance(config, str):
        config = config.splitlines()
    parse = CiscoConfParse(config)
    return parse.find_parents_w_child(parent, child)
|
def boot(zone, single=False, altinit=None, smf_options=None):
    '''Boot (or activate) the specified zone.

    zone : string
        name or uuid of the zone
    single : boolean
        boots only to milestone svc:/milestone/single-user:default.
    altinit : string
        valid path to an alternative executable to be the primordial process.
    smf_options : string
        include two categories of options to control booting behavior of
        the service management facility: recovery options and messages options.

    CLI Example:

    .. code-block:: bash

        salt '*' zoneadm.boot clementine
        salt '*' zoneadm.boot maeve single=True
        salt '*' zoneadm.boot teddy single=True smf_options=verbose
    '''
    ret = {'status': True}
    ## build boot_options
    opts = ''
    if single:
        opts = '-s {0}'.format(opts)
    if altinit:  # note: we cannot validate the path, as this is local to the zonepath.
        opts = '-i {0} {1}'.format(altinit, opts)
    if smf_options:
        opts = '-m {0} {1}'.format(smf_options, opts)
    if opts:
        opts = ' -- {0}'.format(opts.strip())
    ## execute boot
    zone_arg = '-u {0}'.format(zone) if _is_uuid(zone) else '-z {0}'.format(zone)
    res = __salt__['cmd.run_all']('zoneadm {zone} boot{boot_opts}'.format(zone=zone_arg, boot_opts=opts))
    ret['status'] = res['retcode'] == 0
    message = (res['stdout'] if ret['status'] else res['stderr']).replace('zoneadm: ', '')
    # only report a message when the command produced output
    if message:
        ret['message'] = message
    return ret
|
def setup ( app ) :
    """Set up the plugin.

    Registers the sphinx-tabs config values, directives, static assets
    (CSS/JS) and Sphinx event hooks on *app*.
    """
    app . add_config_value ( 'sphinx_tabs_nowarn' , False , '' )
    app . add_config_value ( 'sphinx_tabs_valid_builders' , [ ] , '' )
    app . add_directive ( 'tabs' , TabsDirective )
    app . add_directive ( 'tab' , TabDirective )
    app . add_directive ( 'group-tab' , GroupTabDirective )
    app . add_directive ( 'code-tab' , CodeTabDirective )
    for path in [ 'sphinx_tabs/' + f for f in FILES ] :
        if path . endswith ( '.css' ) :
            # newer Sphinx uses add_css_file; fall back for older versions
            if 'add_css_file' in dir ( app ) :
                app . add_css_file ( path )
            else :
                app . add_stylesheet ( path )
        if path . endswith ( '.js' ) :
            # NOTE(review): Sphinx's modern JS hook is ``add_js_file``;
            # ``add_script_file`` is not a known Sphinx API, so the
            # fallback branch likely always runs — confirm.
            if 'add_script_file' in dir ( app ) :
                app . add_script_file ( path )
            else :
                app . add_javascript ( path )
    app . connect ( 'html-page-context' , update_context )
    app . connect ( 'build-finished' , copy_assets )
|
def _instantiate_layers ( self ) :
    """Instantiates all the linear modules used in the network.

    Layers are instantiated in the constructor, as opposed to the build
    function, because MLP implements the Transposable interface, and the
    transpose function can be called before the module is actually connected
    to the graph and build is called.

    Notice that this is safe since layers in the transposed module are
    instantiated using a lambda returning input_size of the mlp layers, and
    this doesn't have to return sensible values until the original module is
    connected to the graph.
    """
    # Here we are entering the module's variable scope to name our submodules
    # correctly (not to create variables). As such it's safe to not check
    # whether we're in the same graph. This is important if we're constructing
    # the module in one graph and connecting it in another (e.g. with `defun`
    # the module is created in some default graph, and connected to a capturing
    # graph in order to turn it into a graph function).
    with self . _enter_variable_scope ( check_same_graph = False ) :
        # one Linear per output size; ``xrange`` indicates Python 2 code
        self . _layers = [ basic . Linear ( self . _output_sizes [ i ] , name = "linear_{}" . format ( i ) , initializers = self . _initializers , partitioners = self . _partitioners , regularizers = self . _regularizers , use_bias = self . use_bias ) for i in xrange ( self . _num_layers ) ]
|
def prepare_static_data(self, data):
    """If user defined static fields, then process them with visible value.

    :param data: mapping of field name to raw value.
    :return: a copy of *data* with each static field replaced by its
        display value.
    """
    result = data.copy()
    for field in self.get_fields():
        field_name = field['name']
        if field['static'] and field_name in result:
            rendered = make_view_field(field, None,
                                       self.types_convert_map,
                                       self.fields_convert_map,
                                       result[field_name])
            result[field_name] = rendered['display']
    return result
|
def make_oracle(input_qubits, output_qubit, secret_factor_bits, secret_bias_bit):
    """Yield gates implementing f(a) = a · factors + bias (mod 2)."""

    # The bias term is a single (conditional) flip of the output qubit.
    if secret_bias_bit:
        yield cirq.X(output_qubit)
    # Each set factor bit contributes its input qubit to the parity sum.
    for in_qubit, factor_bit in zip(input_qubits, secret_factor_bits):
        if factor_bit:
            yield cirq.CNOT(in_qubit, output_qubit)
|
def setup(app):
    """Sphinx extension entry point: attach this package's hooks to *app*.

    :type app: sphinx.application.Sphinx
    """

    # Both submodules can also be imported individually to use only a
    # fragment of this package; here we wire up everything at once.
    from sphinxcontrib_django import docstrings, roles
    docstrings.setup(app)
    roles.setup(app)
|
def start(self):
    """Starts this QEMU VM.

    Coroutine (old-style ``yield from``).  Under the execution lock it:
    resumes the VM if already running, optionally allocates a free monitor
    port, verifies available RAM, launches the QEMU process with output
    captured to qemu.log, wires adapter NIOs through uBridge, then applies
    process priority / CPU throttling and starts the console wrapper.

    :raises QemuError: if a monitor port cannot be found or QEMU fails to start.
    """

    with (yield from self._execute_lock):
        if self.is_running():  # resume the VM if it is paused
            yield from self.resume()
            return
        if self._manager.config.get_section_config("Qemu").getboolean("monitor", True):
            try:
                info = socket.getaddrinfo(self._monitor_host, 0, socket.AF_UNSPEC, socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
                if not info:
                    raise QemuError("getaddrinfo returns an empty list on {}".format(self._monitor_host))
                for res in info:
                    af, socktype, proto, _, sa = res
                    # let the OS find an unused port for the Qemu monitor
                    # (bind to port 0, then read back the assigned port).
                    with socket.socket(af, socktype, proto) as sock:
                        sock.bind(sa)
                        self._monitor = sock.getsockname()[1]
            except OSError as e:
                raise QemuError("Could not find free port for the Qemu monitor: {}".format(e))
        # check if there is enough RAM to run
        self.check_available_ram(self.ram)
        command = yield from self._build_command()
        command_string = " ".join(shlex.quote(s) for s in command)
        try:
            log.info("Starting QEMU with: {}".format(command_string))
            self._stdout_file = os.path.join(self.working_dir, "qemu.log")
            log.info("logging to {}".format(self._stdout_file))
            with open(self._stdout_file, "w", encoding="utf-8") as fd:
                fd.write("Start QEMU with {}\n\nExecution log:\n".format(command_string))
                self.command_line = ' '.join(command)
                # QEMU's stdout/stderr are captured into qemu.log for debugging.
                self._process = yield from asyncio.create_subprocess_exec(*command, stdout=fd, stderr=subprocess.STDOUT, cwd=self.working_dir)
            # Bridge every Ethernet adapter's first NIO through uBridge.
            yield from self._start_ubridge()
            for adapter_number, adapter in enumerate(self._ethernet_adapters):
                nio = adapter.get_nio(0)
                if nio:
                    yield from self.add_ubridge_udp_connection("QEMU-{}-{}".format(self._id, adapter_number), self._local_udp_tunnels[adapter_number][1], nio)
            log.info('QEMU VM "{}" started PID={}'.format(self._name, self._process.pid))
            self.status = "started"
            # Watch the process so _termination_callback fires if QEMU dies.
            monitor_process(self._process, self._termination_callback)
        except (OSError, subprocess.SubprocessError, UnicodeEncodeError) as e:
            stdout = self.read_stdout()
            log.error("Could not start QEMU {}: {}\n{}".format(self.qemu_path, e, stdout))
            raise QemuError("Could not start QEMU {}: {}\n{}".format(self.qemu_path, e, stdout))
        yield from self._set_process_priority()
        if self._cpu_throttling:
            self._set_cpu_throttling()
        if "-enable-kvm" in command_string:
            # KVM flag present in the final command line => HW virtualization on.
            self._hw_virtualization = True
        try:
            yield from self.start_wrap_console()
        except OSError as e:
            raise QemuError("Could not start QEMU console {}\n".format(e))
|
def dao_fork_at(dao_fork_block_number: BlockNumber, chain_class: Type[BaseChain]) -> Type[BaseChain]:
    """Set the block number at which the DAO fork happens.

    Requires that a version of the
    :class:`~eth.vm.forks.homestead.HomesteadVM` is present in the chain's
    ``vm_configuration``.
    """

    vm_config = chain_class.vm_configuration
    # The DAO fork block only makes sense on a Homestead-derived VM.
    if not any(_is_homestead(vm_class) for _, vm_class in vm_config):
        raise ValidationError("No HomesteadVM found in vm_configuration.")
    updated_config = _set_vm_dao_fork_block_number(dao_fork_block_number, vm_config)
    return chain_class.configure(vm_configuration=updated_config)
|
def always(func: Callable[[], Generator]) -> AlwaysFixture:
    """Decorator registering the 'always' fixture, which runs before every
    provider state fixture and faasport call.  Only one may be defined."""

    global user_always
    if user_always is not None:
        raise RuntimeError('Multiple definitions of @always fixture.')
    # Registered and returned as a context manager wrapping the generator.
    fixture = contextmanager(func)
    user_always = fixture
    return fixture
|
def scp_file_remote_to_local(self, remote_path, local_path):
    """Scp a remote file to local.

    Args:
        remote_path (str): path of the file on the remote host.
        local_path (str): destination path on the local machine.
    """

    # NOTE(review): hard-coded, user-specific key path; this should come
    # from configuration rather than being baked into the code -- TODO confirm.
    sshadd_command = ['ssh-add', '/Users/pyrat/.ssh/ubuntuNode']
    self.info_log("executing command: %s" % ' '.join(sshadd_command))
    p = subprocess.Popen(sshadd_command)
    p.wait()
    # Previously failures were silently ignored; keep going (best effort,
    # the key may already be loaded) but record the failure.
    if p.returncode != 0:
        self.info_log("ssh-add exited with code %s" % p.returncode)
    scp_command = ['scp', '-o', 'StrictHostKeyChecking=no',
                   '%s@%s:"%s"' % (self.browser_config.get('username'),
                                   self.get_ip(), remote_path),
                   local_path]
    self.info_log("executing command: %s" % ' '.join(scp_command))
    p = subprocess.Popen(scp_command)
    p.wait()
    # Surface a failed copy in the log instead of swallowing it.
    if p.returncode != 0:
        self.info_log("scp exited with code %s" % p.returncode)
|
def kindpath(self, kind):
    """Return the relative endpoint path for the given input *kind*.

    :param kind: The kind of input:
        - "ad": Active Directory
        - "monitor": Files and directories
        - "registry": Windows Registry
        - "script": Scripts
        - "splunktcp": TCP, processed
        - "tcp": TCP, unprocessed
        - "udp": UDP
        - "win-event-log-collections": Windows event log
        - "win-perfmon": Performance monitoring
        - "win-wmi-collections": WMI
    :type kind: ``string``
    :return: The relative endpoint path.
    :rtype: ``string``
    """

    # Only the two TCP variants need remapping; every other kind maps
    # directly to an endpoint of the same name.
    tcp_paths = {'tcp': 'tcp/raw', 'splunktcp': 'tcp/cooked'}
    return UrlEncoded(tcp_paths.get(kind, kind), skip_encode=True)
|
def fetch_data(self, stock_no, nowdatetime):
    """Fetch daily trading data from twse.com.tw and return a csv.reader.

    Columns of each row:
        0. date
        1. shares traded
        2. turnover (value)
        3. opening price
        4. highest price
        5. lowest price
        6. closing price
        7. price change
        8. number of transactions

    :param str stock_no: stock ticker number
    :param datetime nowdatetime: current time (selects year/month to query)
    :rtype: list
    """

    form_fields = {
        'download': 'csv',
        'query_year': nowdatetime.year,
        'query_month': nowdatetime.month,
        'CO_ID': stock_no,
    }
    response = TWSE_CONNECTIONS.request(
        'POST',
        '/ch/trading/exchange/STOCK_DAY/STOCK_DAYMAIN.php',
        fields=form_fields)
    # TWSE serves Big5/cp950; undecodable bytes are dropped.
    decoded = response.data.decode('cp950', 'ignore')
    # NOTE(review): re-encoding before wrapping in StringIO is Python 2
    # style; under Python 3 StringIO expects str -- confirm target version.
    return csv.reader(StringIO(decoded.encode('utf-8')))
|
def get_all_loopbacks(engine):
    """Return every loopback interface defined on the given engine."""

    loopbacks = []
    # Cluster engines additionally carry cluster-virtual loopbacks.
    if 'fw_cluster' in engine.type:
        loopbacks.extend(
            LoopbackClusterInterface(cvi, engine)
            for cvi in engine.data.get('loopback_cluster_virtual_interface', []))
    # Every node may define its own dedicated loopbacks.
    for node in engine.nodes:
        loopbacks.extend(
            LoopbackInterface(lb, engine)
            for lb in node.data.get('loopback_node_dedicated_interface', []))
    return loopbacks
|
def cmd_wp_remove(self, args):
    '''handle wp remove: delete waypoint WPNUM and resend the mission.'''

    if len(args) != 1:
        print("usage: wp remove WPNUM")
        return
    # Previously a non-numeric argument raised an unhandled ValueError;
    # treat it as a usage error instead.
    try:
        idx = int(args[0])
    except ValueError:
        print("usage: wp remove WPNUM")
        return
    if idx < 0 or idx >= self.wploader.count():
        print("Invalid wp number %u" % idx)
        return
    wp = self.wploader.wp(idx)
    # setup for undo: keep a copy of the removed waypoint and its slot.
    self.undo_wp = copy.copy(wp)
    self.undo_wp_idx = idx
    self.undo_type = "remove"
    self.wploader.remove(wp)
    # Renumber jump targets that pointed past the removed slot.
    self.fix_jumps(idx, -1)
    self.send_all_waypoints()
    print("Removed WP %u" % idx)
|
def _include_exclude ( file_path , include = None , exclude = None ) :
"""Check if file matches one of include filters and not in exclude filter .
: param file _ path : Path to the file .
: param include : Tuple containing patterns to which include from result .
: param exclude : Tuple containing patterns to which exclude from result ."""
|
if exclude is not None and exclude :
for pattern in exclude :
if file_path . match ( pattern ) :
return False
if include is not None and include :
for pattern in include :
if file_path . match ( pattern ) :
return True
return False
return True
|
def export(self, output: str = None, exclude: List[str] = None, **kwargs):
    """Export the collection item in the requested mimetype.

    .. note:: This implementation defines no special mimetypes of its own,
        so it defers entirely to the generic :meth:`Exportable.export`.

    :param output: Mimetype to export to (uses MyCapytain.common.constants.Mimetypes)
    :type output: str
    :param exclude: Information to exclude; specific to implementations
    :type exclude: [str]
    :return: Object using a different representation
    """

    exported = Exportable.export(self, output, exclude=exclude, **kwargs)
    return exported
|
async def read_headers ( stream : asyncio . StreamReader ) -> "Headers" :
"""Read HTTP headers from ` ` stream ` ` .
` ` stream ` ` is an : class : ` ~ asyncio . StreamReader ` .
Return a : class : ` Headers ` instance
Non - ASCII characters are represented with surrogate escapes ."""
|
# https : / / tools . ietf . org / html / rfc7230 # section - 3.2
# We don ' t attempt to support obsolete line folding .
headers = Headers ( )
for _ in range ( MAX_HEADERS + 1 ) :
line = await read_line ( stream )
if line == b"" :
break
# This may raise " ValueError : not enough values to unpack "
raw_name , raw_value = line . split ( b":" , 1 )
if not _token_re . fullmatch ( raw_name ) :
raise ValueError ( "Invalid HTTP header name: %r" % raw_name )
raw_value = raw_value . strip ( b" \t" )
if not _value_re . fullmatch ( raw_value ) :
raise ValueError ( "Invalid HTTP header value: %r" % raw_value )
name = raw_name . decode ( "ascii" )
# guaranteed to be ASCII at this point
value = raw_value . decode ( "ascii" , "surrogateescape" )
headers [ name ] = value
else :
raise ValueError ( "Too many HTTP headers" )
return headers
|
def disapproveworker(ctx, workers, account):
    """Disapprove one or more workers on behalf of *account*."""

    tx = ctx.bitshares.disapproveworker(workers, account=account)
    print_tx(tx)
|
def main():
    """Fetch simple gene-term associations from Golr using bioentity document
    type, one line per gene, written as TSV to stdout or the -o file."""

    import argparse
    prs = argparse.ArgumentParser(__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    prs.add_argument('--taxon_id', type=str,
                     help='NCBI taxon ID, must match exact species/strain used by GO Central, e.g. 4896 for S Pombe')
    # Fixed copy-pasted help text: this option is the Golr endpoint, not a taxon ID.
    prs.add_argument('--golr_url', default='http://golr.geneontology.org/solr/', type=str,
                     help='Base URL of the Golr (Solr) instance to query')
    prs.add_argument('-o', default=None, type=str,
                     help="Specifies the name of the output file")
    prs.add_argument('--max_rows', default=100000, type=int,
                     help="maximum rows to be fetched")
    args = prs.parse_args()
    solr = pysolr.Solr(args.golr_url, timeout=30)
    sys.stderr.write("TAX:" + args.taxon_id + "\n")
    results = solr.search(
        q='document_category:"bioentity" AND taxon:"NCBITaxon:' + args.taxon_id + '"',
        fl='bioentity_label,annotation_class_list',
        rows=args.max_rows)
    sys.stderr.write("NUM GENES:" + str(len(results)) + "\n")
    if len(results) == 0:
        sys.stderr.write("NO RESULTS")
        sys.exit(1)  # sys.exit instead of the site builtin exit()
    if len(results) == args.max_rows:
        # A full page means the result set was probably truncated.
        sys.stderr.write("max_rows set too low")
        sys.exit(1)
    file_out = sys.stdout if args.o is None else open(args.o, 'w')
    try:
        for r in results:
            gene_symbol = r['bioentity_label']
            sys.stderr.write(gene_symbol + "\n")
            if 'annotation_class_list' in r:
                file_out.write(r['bioentity_label'] + "\t" + ';'.join(r['annotation_class_list']) + "\n")
            else:
                sys.stderr.write("no annotations for " + gene_symbol + "\n")
    finally:
        # Close the output file even if a row fails to serialize (was leaked before).
        if args.o is not None:
            file_out.close()
    if args.o is not None:
        sys.stdout.write(" WROTE: {}\n".format(args.o))
|
def p_recent(self, kind, cur_p='', with_catalog=True, with_date=True):
    '''Render the admin partial listing the most recently edited posts.

    :param kind: post kind key (used to look up the router and counts).
    :param cur_p: requested page number as a string; '' means page 1.
    :param with_catalog: whether the template shows catalog info.
    :param with_date: whether the template shows dates.
    '''

    # Resolve the requested page number; anything below 1 falls back to 1.
    if cur_p == '':
        current_page_number = 1
    else:
        current_page_number = int(cur_p)
    current_page_number = 1 if current_page_number < 1 else current_page_number
    # NOTE(review): pager_num is computed but never used below -- confirm
    # whether the pager display was meant to consume it.
    pager_num = int(MPost.total_number(kind) / CMS_CFG['list_num'])
    # Template context for the recent-posts partial.
    kwd = {'pager': '', 'title': 'Recent posts.', 'with_catalog': with_catalog, 'with_date': with_date, 'kind': kind, 'current_page': current_page_number, 'post_count': MPost.get_counts(), 'router': config.router_post[kind], }
    self.render('admin/post_ajax/post_list.html', kwd=kwd, view=MPost.query_recent(num=20, kind=kind), infos=MPost.query_pager_by_slug(kind=kind, current_page_num=current_page_number), format_date=tools.format_date, userinfo=self.userinfo, cfg=CMS_CFG, )
|
def _find_methods ( cls , * names , ** kwds ) :
"""Compute a list of composable methods .
Because this is a common operation and the class hierarchy is
static , the outcome is cached ( assuming that for a particular list
of names the reversed flag is either always on , or always off ) .
Args :
* names : One or more method names .
reverse : Optional flag , default False ; if True , the list is
reversed .
Returns :
A list of callable class method objects ."""
|
reverse = kwds . pop ( 'reverse' , False )
assert not kwds , repr ( kwds )
cache = cls . __dict__ . get ( '_find_methods_cache' )
if cache :
hit = cache . get ( names )
if hit is not None :
return hit
else :
cls . _find_methods_cache = cache = { }
methods = [ ]
for c in cls . __mro__ :
for name in names :
method = c . __dict__ . get ( name )
if method is not None :
methods . append ( method )
if reverse :
methods . reverse ( )
cache [ names ] = methods
return methods
|
def cmd_line(line, ctrl):
    """Handle one console line for the wrapper modules.

    "update start" / "update stop" broadcast updating_start()/updating_stop()
    to every client in ``ctrl.modules``; any other line is ignored.

    :param line: the raw command line.
    :param ctrl: controller whose ``modules`` is a list of WrapperClient.
    :return: the input line, unmodified.
    """

    # Fixed: the original placed this type note in a bare string after the
    # assignment, which is a no-op statement, not a docstring.
    clients = ctrl.modules  # :type: list[WrapperClient]
    if line == "update start":
        for client in clients:
            client.updating_start()
    elif line == "update stop":
        for client in clients:
            client.updating_stop()
    return line
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.