signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def _extract_specification_parts(specification):
    # type: (str) -> Tuple[str, str]
    """Extract the language and the interface from a "language:/interface"
    interface name.

    :param specification: The formatted interface name
    :return: A (language, interface name) tuple
    :raise ValueError: Invalid specification content
    """
    try:
        # Parse the URI-like string
        parsed = urlparse(specification)
    except ValueError:
        # FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; only the parsing error should be translated.
        raise ValueError("Invalid specification URL: {0}".format(specification))

    # Extract the interface name
    interface = parsed.path

    # Extract the language, if given
    language = parsed.scheme
    if not language:
        # Simple name, without scheme
        language = PYTHON_LANGUAGE
    else:
        # Formatted name: un-escape it, without the starting '/'
        interface = _unescape_specification(interface[1:])
    return language, interface
async def findArtifactFromTask(self, *args, **kwargs):
    """Get Artifact From Indexed Task.

    Find a task by index path and redirect to the artifact on the most
    recent run with the given `name`.

    Note that multiple calls to this endpoint may return artifacts from
    different tasks if a new task is inserted into the index between calls.
    Avoid using this method as a stable link to multiple, connected files
    if the index path does not contain a unique identifier.  This can be
    remedied by including the revision in the index path or by bundling
    both installer and debug symbols into a single artifact.

    If no task exists for the given index path, this API end-point
    responds with 404.

    This method is ``stable``
    """
    api_descriptor = self.funcinfo["findArtifactFromTask"]
    return await self._makeApiCall(api_descriptor, *args, **kwargs)
def toNumber(str, default=None):
    """toNumber(str[, default]) -> integer | float | default

    Converts the given string to a numeric value.  The string may be a
    hexadecimal, integer, or floating number.  If the string could not be
    converted, default (None) is returned.

    Surrounding whitespace is ignored (int()/float() already ignored it;
    the hexadecimal branch now does too, matching the examples below).
    Negative hexadecimal literals ("-0x2A") are also accepted.

    Examples:
    >>> n = toNumber(" 0x2A ")
    >>> assert type(n) is int and n == 42
    >>> n = toNumber("42")
    >>> assert type(n) is int and n == 42
    >>> n = toNumber("42.0")
    >>> assert type(n) is float and n == 42.0
    >>> n = toNumber("Foo", 42)
    >>> assert type(n) is int and n == 42
    >>> n = toNumber("Foo")
    >>> assert n is None
    """
    value = default
    # FIX: the "0x" prefix test previously ran on the raw string, so
    # whitespace-padded hex such as " 0x2A " silently fell through to the
    # int()/float() path and returned `default` instead of 42.
    text = str.strip()
    try:
        if text.startswith(("0x", "-0x")):
            value = int(text, 16)
        else:
            try:
                value = int(text)
            except ValueError:
                value = float(text)
    except ValueError:
        pass
    return value
def layer(op):
    '''Decorator for composable network layers.'''

    def layer_decorated(self, *args, **kwargs):
        # Automatically set a name if not provided.
        name = kwargs.setdefault('name', self.get_unique_name(op.__name__))
        # Figure out the layer inputs from the pending terminals.
        terminal_count = len(self.terminals)
        if terminal_count == 0:
            raise RuntimeError('No input variables found for layer %s.' % name)
        if terminal_count == 1:
            layer_input = self.terminals[0]
        else:
            layer_input = list(self.terminals)
        # Perform the operation and register the output in the layer LUT.
        layer_output = op(self, layer_input, *args, **kwargs)
        self.layers[name] = layer_output
        # The output becomes the input for the next layer; return self so
        # calls can be chained.
        self.feed(layer_output)
        return self

    return layer_decorated
def unzip_file(self, zip_path, output_path):
    """Unzip a local file into a specified directory."""
    archive = zipfile.ZipFile(zip_path, 'r')
    try:
        archive.extractall(output_path)
    finally:
        archive.close()
def bytes_from_decode_data(s):
    """copy from base64._bytes_from_decode_data

    Normalize *s* into ``bytes`` suitable for base64-style decoding.

    :param s: ASCII text, bytes, or any bytes-like object
    :return: the ``bytes`` representation of ``s``
    :raise NotValidParamError: if a text argument contains non-ASCII characters
    :raise TypeError: if ``s`` is neither text nor bytes-like
    """
    # NOTE(review): `unicode` only exists on Python 2; on Python 3 this line
    # would raise NameError unless a compatibility alias is defined elsewhere
    # in this module — confirm.
    if isinstance(s, (str, unicode)):
        try:
            return s.encode('ascii')
        except UnicodeEncodeError:
            raise NotValidParamError('String argument should contain only ASCII characters')
    # `bytes_types` is defined elsewhere (in base64 it is bytes/bytearray).
    if isinstance(s, bytes_types):
        return s
    try:
        # Accept any object exposing the buffer protocol.
        return memoryview(s).tobytes()
    except TypeError:
        # `suppress_context` mirrors base64's "raise ... from None" behaviour.
        raise suppress_context(TypeError('Argument should be a bytes-like object or ASCII string, not ' '{!r}'.format(s.__class__.__name__)))
def at(self, t):
    """Return the modelled tidal height at given times.

    Arguments:
    t -- array of times at which to evaluate the tidal height
    """
    t0 = t[0]
    # Hours elapsed since the first requested time.
    hours = self._hours(t0, t)
    # Window length (in hours) per partition; the slowly varying nodal
    # corrections (u, f) are evaluated once at each window's midpoint.
    partition = 240.0
    t = self._partition(hours, partition)
    times = self._times(t0, [(i + 0.5) * partition for i in range(len(t))])
    speed, u, f, V0 = self.prepare(t0, times, radians=True)
    # Column vectors of constituent amplitudes and phases (phase in radians).
    H = self.model['amplitude'][:, np.newaxis]
    p = d2r * self.model['phase'][:, np.newaxis]
    # Evaluate the tidal series per partition and stitch the pieces together.
    # NOTE(review): `izip` implies Python 2 (itertools.izip) or a
    # compatibility import — confirm against the module's imports.
    return np.concatenate([
        Tide._tidal_series(t_i, H, p, speed, u_i, f_i, V0)
        for t_i, u_i, f_i in izip(t, u, f)])
def run_latex_report(base, report_dir, section_info):
    """Generate a pdf report with plots using latex.

    :param base: base name used to derive the ``.tex`` file name
    :param report_dir: directory in which the report is written and compiled
    :param section_info: section data rendered into the template as ``sections``
    """
    out_name = "%s_recal_plots.tex" % base
    out = os.path.join(report_dir, out_name)
    with open(out, "w") as out_handle:
        out_tmpl = Template(out_template)
        out_handle.write(out_tmpl.render(sections=section_info))
    # Run pdflatex inside the report directory via `cwd` instead of the
    # chdir/try/finally dance (safer if callers run concurrently).  The
    # exit code is ignored, as before.
    subprocess.call(["pdflatex", out_name], cwd=report_dir)
def summary(self):
    """Summary statistics describing the fit.

    Set the ``alpha`` property in the object before calling.

    Returns
    -------
    df : DataFrame
        Contains columns coef, np.exp(coef), se(coef), z, p, lower, upper
    """
    # Confidence level used to label the interval columns below.
    ci = 1 - self.alpha
    # Silence numpy warnings from log2/exp on edge values (e.g. p == 0).
    with np.errstate(invalid="ignore", divide="ignore"):
        df = pd.DataFrame(index=self.hazards_.index)
        df["coef"] = self.hazards_
        df["exp(coef)"] = np.exp(self.hazards_)
        df["se(coef)"] = self.standard_errors_
        df["z"] = self._compute_z_values()
        df["p"] = self._compute_p_values()
        # Survival-analysis convention: report -log2(p) for readability.
        df["-log2(p)"] = -np.log2(df["p"])
        df["lower %g" % ci] = self.confidence_intervals_["lower-bound"]
        df["upper %g" % ci] = self.confidence_intervals_["upper-bound"]
    return df
def execute(self, limit='default', params=None, **kwargs):
    """If this expression is based on physical tables in a database backend,
    execute it against that backend.

    Parameters
    ----------
    limit : integer or None, default 'default'
        Pass an integer to effect a specific row limit.  limit=None means
        "no limit".  The default is whatever is in ibis.options.

    Returns
    -------
    result : expression-dependent
        Result of compiling expression and executing in backend
    """
    # Imported lazily to avoid a circular dependency with ibis.client.
    from ibis.client import execute as _execute
    return _execute(self, limit=limit, params=params, **kwargs)
def summarize_sec2hdrgos(self, sec2d_hdrgos):
    """Get counts of header GO IDs and sections.

    Returns a dict with grouped header GO IDs ('G'), the named sections
    that contain them ('S'), and the ungrouped remainder ('U').
    """
    all_hdrgos = set()
    grouped_hdrgos = set()
    grouped_sections = set()
    for section_name, hdrgos in sec2d_hdrgos:
        self._chk_hdrgoids(hdrgos)
        all_hdrgos |= set(hdrgos)
        # Entries filed under the default section are "ungrouped".
        if section_name == HdrgosSections.secdflt:
            continue
        grouped_hdrgos |= set(hdrgos)
        grouped_sections.add(section_name)
    return {'G': grouped_hdrgos,
            'S': grouped_sections,
            'U': all_hdrgos - grouped_hdrgos}
def check_error(model, path, shapes, output='softmax_output', verbose=True):
    """Check the difference between predictions from MXNet and CoreML.

    :param model: MXNet model to compare against
    :param path: path of the saved CoreML model
    :param shapes: dict mapping input names to their shapes
    :param output: name of the CoreML output to compare
    :param verbose: when True, print sample predictions and the error
    :return: L2 norm of the difference between the two prediction vectors
    """
    coreml_model = _coremltools.models.MLModel(path)
    input_data = {}
    input_data_copy = {}
    for ip in shapes:
        input_data[ip] = _np.random.rand(*shapes[ip]).astype('f')
        input_data_copy[ip] = _np.copy(input_data[ip])
    dataIter = _mxnet.io.NDArrayIter(input_data_copy)
    mx_out = model.predict(dataIter).flatten()
    e_out_dict = coreml_model.predict(_mxnet_remove_batch(input_data))
    e_out = e_out_dict[output].flatten()
    error = _np.linalg.norm(e_out - mx_out)
    if verbose:
        print("First few predictions from CoreML : %s" % e_out[0:10])
        # FIX: previously printed e_out (CoreML) again instead of the
        # MXNet predictions.
        print("First few predictions from MXNet : %s" % mx_out[0:10])
        print("L2 Error on random data %s" % error)
    return error
def save_json(histogram: Union[HistogramBase, HistogramCollection], path: Optional[str] = None, **kwargs) -> str:
    """Save histogram to JSON format.

    Parameters
    ----------
    histogram : Any histogram
    path : If set, also writes to the path.

    Returns
    -------
    json : The JSON representation of the histogram
    """
    # TODO: Implement multiple histograms in one file?
    data = histogram.to_dict()
    data["physt_version"] = CURRENT_VERSION
    if isinstance(histogram, HistogramBase):
        compatible = COMPATIBLE_VERSION
    elif isinstance(histogram, HistogramCollection):
        compatible = COLLECTION_COMPATIBLE_VERSION
    else:
        raise TypeError("Cannot save unknown type: {0}".format(type(histogram)))
    data["physt_compatible"] = compatible
    text = json.dumps(data, **kwargs)
    if path:
        with open(path, "w", encoding="utf-8") as stream:
            stream.write(text)
    return text
def add_labels_to_subsets(ax, subset_by, subset_order, text_kwargs=None, add_hlines=True, hline_kwargs=None):
    """Helper function for adding labels to subsets within a heatmap.

    Assumes that imshow() was called with `subsets` and `subset_order`.

    Parameters
    ----------
    ax : matplotlib.Axes
        The axes to label.  Generally you can use the `fig.array_axes`
        attribute of the Figure object returned by
        `metaseq.plotutils.imshow`.
    subset_by, subset_order : array, list
        See `metaseq.plotutils.imshow()` docstring; these should be the same
        `subsets` and `subset_order` that were provided to that function.
    text_kwargs : dict, optional
        Extra keyword arguments merged into the `ax.text` call.
    add_hlines : bool
        Whether to draw a horizontal separator after each subset.
    hline_kwargs : dict, optional
        Extra keyword arguments merged into the `ax.axhline` call.
    """
    # x is given in axes coordinates (so 1.1 sits just right of the
    # heatmap) while y stays in data coordinates.
    _text_kwargs = dict(transform=ax.get_yaxis_transform())
    if text_kwargs:
        _text_kwargs.update(text_kwargs)
    _hline_kwargs = dict(color='k')
    if hline_kwargs:
        _hline_kwargs.update(hline_kwargs)
    pos = 0
    for label in subset_order:
        # Boolean mask of rows belonging to this subset.
        ind = subset_by == label
        last_pos = pos
        pos += sum(ind)
        # Separator line at the subset boundary.
        if add_hlines:
            ax.axhline(pos, **_hline_kwargs)
        # Label centred vertically within the subset's row span.
        ax.text(1.1, last_pos + (pos - last_pos) / 2.0, label, **_text_kwargs)
def element(self, *args, **kwargs):
    """Add a child element to the :class:`xml4h.nodes.Element` node
    represented by this Builder.

    :return: a new Builder that represents the child element.

    Delegates to :meth:`xml4h.nodes.Element.add_element`.
    """
    return Builder(self._element.add_element(*args, **kwargs))
def check_file_exists(self, remote_cmd="dir home"):
    """Check if the dest_file already exists on the file system (return boolean)."""
    if self.direction == "get":
        # Destination is local: a plain filesystem check suffices.
        return os.path.exists(self.dest_file)
    if self.direction == "put":
        # Destination is remote: list the directory over the control
        # channel and search the output for the file name.
        listing = self.ssh_ctl_chan.send_command_expect(remote_cmd)
        pattern = r"Directory contents .*{}".format(self.dest_file)
        return bool(re.search(pattern, listing, flags=re.DOTALL))
def make_noise_surface(dims=DEFAULT_DIMS, blur=10, seed=None):
    """Makes a surface by generating random noise and blurring it.

    Args:
        dims (pair): the dimensions of the surface to create
        blur (float): the amount of Gaussian blur to apply
        seed (int): a random seed to use (optional)

    Returns:
        surface: A surface.
    """
    if seed is not None:
        np.random.seed(seed)
    noise = np.random.normal(size=dims)
    return gaussian_filter(noise, blur)
def get_context(awsclient, env, tool, command, arguments=None):
    """This assembles the tool context. Private members are preceded by a '_'.

    :param tool:
    :param command:
    :return: dictionary containing the gcdt tool context
    """
    # TODO: elapsed, artifact (stack, depl-grp, lambda, api)
    context = {
        '_awsclient': awsclient,
        'env': env,
        'tool': tool,
        'command': command,
        # TODO clean up arguments -> args
        '_arguments': arguments if arguments is not None else {},
        'version': __version__,
        'user': _get_user(),
        'plugins': get_plugin_versions().keys(),
    }
    return context
def fftlog(fEM, time, freq, ftarg):
    r"""Fourier Transform using FFTLog.

    FFTLog is the logarithmic analogue to the Fast Fourier Transform FFT.
    FFTLog was presented in Appendix B of [Hami00]_ and published at
    <http://casa.colorado.edu/~ajsh/FFTLog>.

    This function uses a simplified version of ``pyfftlog``, which is a
    python-version of ``FFTLog``. For more details regarding ``pyfftlog`` see
    <https://github.com/prisae/pyfftlog>.

    Not the full flexibility of ``FFTLog`` is available here: Only the
    logarithmic FFT (``fftl`` in ``FFTLog``), not the Hankel transform (``fht``
    in ``FFTLog``). Furthermore, the following parameters are fixed:

        - ``kr`` = 1 (initial value)
        - ``kropt`` = 1 (silently adjusts ``kr``)
        - ``dir`` = 1 (forward)

    Furthermore, ``q`` is restricted to -1 <= q <= 1.

    The function is called from one of the modelling routines in :mod:`model`.
    Consult these modelling routines for a description of the input and output
    parameters.

    Returns
    -------
    tEM : array
        Returns time-domain EM response of ``fEM`` for given ``time``.
    conv : bool
        Only relevant for QWE/QUAD (always True here).
    """
    # NOTE(review): `freq` is unused in this body; presumably kept so all
    # Fourier routines share one signature — confirm against the callers.
    # Get tcalc, dlnr, kr, rk, q; a and n
    _, _, q, mu, tcalc, dlnr, kr, rk = ftarg
    if mu > 0:  # Sine transform: use the (negated) imaginary part
        a = -fEM.imag
    else:  # Cosine transform: use the real part
        a = fEM.real
    n = a.size

    # 1. Amplitude and Argument of kr^(-2 i y) U_mu(q + 2 i y)
    ln2kr = np.log(2.0 / kr)
    d = np.pi / (n * dlnr)
    m = np.arange(1, (n + 1) / 2)
    y = m * d  # y = m*pi/(n*dlnr)

    if q == 0:  # unbiased case (q = 0)
        zp = special.loggamma((mu + 1) / 2.0 + 1j * y)
        arg = 2.0 * (ln2kr * y + zp.imag)
    else:  # biased case (q != 0)
        xp = (mu + 1.0 + q) / 2.0
        xm = (mu + 1.0 - q) / 2.0
        zp = special.loggamma(xp + 0j)
        zm = special.loggamma(xm + 0j)
        # Amplitude and Argument of U_mu(q)
        amp = np.exp(np.log(2.0) * q + zp.real - zm.real)
        # note + Im(zm) to get conjugate value below real axis
        arg = zp.imag + zm.imag
        # first element: cos(arg) = ±1, sin(arg) = 0
        argcos1 = amp * np.cos(arg)
        # remaining elements
        zp = special.loggamma(xp + 1j * y)
        zm = special.loggamma(xm + 1j * y)
        argamp = np.exp(np.log(2.0) * q + zp.real - zm.real)
        arg = 2 * ln2kr * y + zp.imag + zm.imag

    argcos = np.cos(arg)
    argsin = np.sin(arg)

    # 2. Centre point of array
    jc = np.array((n + 1) / 2.0)
    j = np.arange(n) + 1

    # 3. a(r) = A(r) (r/rc)^[-dir*(q-.5)]
    a *= np.exp(-(q - 0.5) * (j - jc) * dlnr)

    # 4. transform a(r) -> ã(k)

    # 4.a normal FFT
    a = fftpack.rfft(a)

    # 4.b multiply the half-complex FFT output by the kernel
    m = np.arange(1, n / 2, dtype=int)  # index variable
    if q == 0:  # unbiased (q = 0) transform
        # multiply by (kr)^[- i 2 m pi/(n dlnr)] U_mu[i 2 m pi/(n dlnr)]
        ar = a[2 * m - 1]
        ai = a[2 * m]
        a[2 * m - 1] = ar * argcos[:-1] - ai * argsin[:-1]
        a[2 * m] = ar * argsin[:-1] + ai * argcos[:-1]
        # problematical last element, for even n
        if np.mod(n, 2) == 0:
            ar = argcos[-1]
            a[-1] *= ar
    else:  # biased (q != 0) transform
        # multiply by (kr)^[- i 2 m pi/(n dlnr)] U_mu[q + i 2 m pi/(n dlnr)]
        # phase
        ar = a[2 * m - 1]
        ai = a[2 * m]
        a[2 * m - 1] = ar * argcos[:-1] - ai * argsin[:-1]
        a[2 * m] = ar * argsin[:-1] + ai * argcos[:-1]

        a[0] *= argcos1
        a[2 * m - 1] *= argamp[:-1]
        a[2 * m] *= argamp[:-1]

        # problematical last element, for even n
        if np.mod(n, 2) == 0:
            m = int(n / 2) - 3
            ar = argcos[m - 1] * argamp[m - 1]
            a[-1] *= ar

    # 4.c normal FFT back
    a = fftpack.irfft(a)

    # Ã(k) = ã(k) k^[-dir*(q+.5)] rc^[-dir*(q-.5)]
    #      = ã(k) (k/kc)^[-dir*(q+.5)] (kc rc)^(-dir*q) (rc/kc)^(dir*.5)
    a = a[::-1] * np.exp(-((q + 0.5) * (j - jc) * dlnr + q * np.log(kr) - np.log(rk) / 2.0))

    # Interpolate for the desired times
    ttEM = iuSpline(np.log(tcalc), a)
    tEM = ttEM(np.log(time))

    # (Second argument is only for QWE)
    return tEM, True
def funnel_rebuild(psg_trm_spec):
    """Rebuilds a model and compares it to a reference model.

    Parameters
    ----------
    psg_trm_spec : (([float], float, int), AMPAL, specification)
        A tuple containing (parameters, score, generation) for a model,
        the best-scoring reference model, and the specification used to
        build models.

    Returns
    -------
    (float, float, int)
        RMSD to the reference model, the score and the generation of the
        rebuilt model.
    """
    (params, score, generation), reference_model, specification = psg_trm_spec
    rebuilt = specification(*params)
    return reference_model.rmsd(rebuilt), score, generation
def revert(self, snapshot: Tuple[Hash32, UUID]) -> None:
    """Revert the VM to the state at the snapshot"""
    state_root = snapshot[0]
    account_snapshot = snapshot[1]
    # Restore the database state root first, then roll the underlying
    # database back to the recorded changeset.
    self._account_db.state_root = state_root
    self._account_db.discard(account_snapshot)
def create_run_from_task_definition(self, task_def_file, options, propertyfile, required_files_pattern):
    """Create a Run from a task definition in yaml format.

    :param task_def_file: path to the task-definition file
    :param options: command-line options for the run
    :param propertyfile: property file for this run (may be None)
    :param required_files_pattern: patterns of additional required files
    :return: the created Run, or None if this run should be ignored
    :raise BenchExecException: if the task definition is invalid
    """
    task_def = load_task_definition_file(task_def_file)

    def expand_patterns_from_tag(tag):
        """Expand the filename patterns stored under `tag` of the task definition."""
        result = []
        patterns = task_def.get(tag, [])
        # Accept a single string in addition to a list of strings.
        # FIX: use collections.abc.Iterable — the alias collections.Iterable
        # was deprecated since Python 3.3 and removed in Python 3.10.
        if isinstance(patterns, str) or not isinstance(patterns, collections.abc.Iterable):
            patterns = [patterns]
        for pattern in patterns:
            expanded = util.expand_filename_pattern(str(pattern), os.path.dirname(task_def_file))
            if not expanded:
                raise BenchExecException(
                    "Pattern '{}' in task-definition file {} did not match any paths.".format(pattern, task_def_file))
            expanded.sort()
            result.extend(expanded)
        return result

    input_files = expand_patterns_from_tag("input_files")
    if not input_files:
        raise BenchExecException(
            "Task-definition file {} does not define any input files.".format(task_def_file))
    required_files = expand_patterns_from_tag("required_files")

    run = Run(task_def_file, input_files, options, self, propertyfile, required_files_pattern, required_files)

    # run.propertyfile of Run is fully determined only after Run is created,
    # thus we handle it and the expected results here.
    if not run.propertyfile:
        return run

    # TODO: support "property_name" attribute in yaml
    prop = result.Property.create(run.propertyfile, allow_unknown=True)
    run.properties = [prop]

    for prop_dict in task_def.get("properties", []):
        if not isinstance(prop_dict, dict) or "property_file" not in prop_dict:
            raise BenchExecException(
                "Missing property file for property in task-definition file {}.".format(task_def_file))
        expanded = util.expand_filename_pattern(prop_dict["property_file"], os.path.dirname(task_def_file))
        if len(expanded) != 1:
            raise BenchExecException(
                "Property pattern '{}' in task-definition file {} does not refer to exactly one file.".format(
                    prop_dict["property_file"], task_def_file))

        # TODO We could reduce I/O by checking absolute paths and using os.path.samestat
        # with cached stat calls.
        if prop.filename == expanded[0] or os.path.samefile(prop.filename, expanded[0]):
            expected_result = prop_dict.get("expected_verdict")
            if expected_result is not None and not isinstance(expected_result, bool):
                raise BenchExecException(
                    "Invalid expected result '{}' for property {} in task-definition file {}.".format(
                        expected_result, prop_dict["property_file"], task_def_file))
            run.expected_results[prop.filename] = result.ExpectedResult(expected_result, prop_dict.get("subproperty"))

    if not run.expected_results:
        logging.debug(
            "Ignoring run '%s' because it does not have the property from %s.", run.identifier, run.propertyfile)
        return None
    elif len(run.expected_results) > 1:
        raise BenchExecException(
            "Property '{}' specified multiple times in task-definition file {}.".format(prop.filename, task_def_file))
    else:
        return run
def enroll(self, uuid, organization, from_date=MIN_PERIOD_DATE, to_date=MAX_PERIOD_DATE, merge=False):
    """Enroll a unique identity in an organization.

    This method adds a new relationship between the unique identity,
    identified by <uuid>, and <organization>. Both entities must exist
    on the registry before creating the new enrollment.

    The period of the enrollment can be given with the parameters <from_date>
    and <to_date>, where "from_date <= to_date". Default values for these
    dates are '1900-01-01' and '2100-01-01'.

    When "merge" parameter is set to True, those overlapped enrollments related
    to <uuid> and <organization> found on the registry will be merged. The given
    enrollment will be also merged.

    :param uuid: unique identifier
    :param organization: name of the organization
    :param from_date: date when the enrollment starts
    :param to_date: date when the enrollment ends
    :param merge: merge overlapped enrollments; by default, it is set to False
    """
    # Empty or None values for uuid and organizations are not allowed.
    # NOTE(review): such input is silently treated as success rather than
    # reported as an error — confirm this is intended CLI behaviour.
    if not uuid or not organization:
        return CMD_SUCCESS
    try:
        api.add_enrollment(self.db, uuid, organization, from_date, to_date)
        code = CMD_SUCCESS
    except (NotFoundError, InvalidValueError) as e:
        self.error(str(e))
        code = e.code
    except AlreadyExistsError as e:
        # An existing enrollment is only an error when not merging;
        # otherwise the merge step below resolves the overlap.
        if not merge:
            msg_data = {'uuid': uuid, 'org': organization, 'from_dt': str(from_date), 'to_dt': str(to_date)}
            msg = "enrollment for '%(uuid)s' at '%(org)s' (from: %(from_dt)s, to: %(to_dt)s) already exists in the registry"
            msg = msg % msg_data
            self.error(msg)
            code = e.code
    if not merge:
        return code
    try:
        api.merge_enrollments(self.db, uuid, organization)
    except (NotFoundError, InvalidValueError) as e:
        # These exceptions were checked above. If any of them is raised
        # here, something really wrong has happened.
        raise RuntimeError(str(e))
    return CMD_SUCCESS
def ObjectTransitionedEventHandler(obj, event):
    """Object has been transitioned to a new state"""
    # Only snapshot supported objects.
    if not supports_snapshots(obj):
        return
    # Start from the most recent review-history entry when available,
    # otherwise from a default transition entry.
    history = api.get_review_history(obj, rev=True)
    if history:
        entry = history[0]
    else:
        entry = {"modified": DateTime().ISO(), "action": event.action}
    # Make the transition also count as a modification entry.
    entry["modified"] = entry.pop("time", DateTime()).ISO()
    entry["action"] = event.action
    # Take a new snapshot and reindex the object in the auditlog catalog.
    take_snapshot(obj, **entry)
    reindex_object(obj)
def getAttributes(self, found=None):
    '''Extract additional attributes from a given expression (i.e.: domains
    and ports from URL and so on). This method may be overwritten in certain
    child classes.

    :param found: expression to be processed.
    :return: The output format will be like:
        [{"type": "i3visio.email", "value": "foo@bar.com", "attributes": []},
         {"type": "i3visio.domain", "value": "foo.com", "attributes": []}]
    '''
    results = []
    if '@' in found:
        # Plain alias@domain expression: split it into its two parts.
        alias, _, domain = found.partition('@')
        results.append({"type": "i3visio.alias", "value": alias, "attributes": []})
        results.append({"type": "i3visio.domain", "value": domain, "attributes": []})
    else:
        # Undo obfuscating substitutions such as '[at]' -> '@' or
        # '[dot]' -> '.' before emitting the e-mail entry.
        for character, aliases in self.substitutionValues.items():
            for alias_text in aliases:
                found = found.replace(alias_text, character)
        results.append({"type": "i3visio.email", "value": found, "attributes": []})
    return results
def get_file_nodes(self, path, node=None):
    """Returns the :class:`umbra.components.factory.script_editor.nodes.FileNode`
    class Nodes with given path.

    :param node: Node to start walking from.
    :type node: AbstractNode or AbstractCompositeNode or Object
    :param path: File path.
    :type path: unicode
    :return: FileNode nodes.
    :rtype: list
    """
    matches = []
    for file_node in self.list_file_nodes(node):
        if file_node.path == path:
            matches.append(file_node)
    return matches
def _store_object ( self , obj_name , content , etag = None , chunked = False , chunk_size = None , headers = None ) :
"""Handles the low - level creation of a storage object and the uploading of
the contents of that object .""" | head_etag = headers . pop ( "ETag" , "" )
if chunked :
headers . pop ( "Content-Length" , "" )
headers [ "Transfer-Encoding" ] = "chunked"
elif etag is None and content is not None :
etag = utils . get_checksum ( content )
if etag :
headers [ "ETag" ] = etag
if not headers . get ( "Content-Type" ) :
headers [ "Content-Type" ] = None
uri = "/%s/%s" % ( self . uri_base , obj_name )
resp , resp_body = self . api . method_put ( uri , data = content , headers = headers ) |
def sg_set_online(self, online):
    """Set the sensor-graph online/offline.

    ``online`` is interpreted as a truthy flag; the graph's ``enabled``
    attribute is always stored as a bool.
    """
    self.sensor_graph.enabled = True if online else False
    return [Error.NO_ERROR]
def setLocation(self, x, y):
    """Set the location of this object to the specified coordinates.

    Coordinates are truncated to integers; returns self for chaining.
    """
    self.x, self.y = int(x), int(y)
    return self
def confirm_vlan(self, number_net, id_environment_vlan, ip_version=None):
    """Checking if the vlan insert needs to be confirmed.

    :param number_net: Filter by vlan number column
    :param id_environment_vlan: Filter by related environment ID
    :param ip_version: Ip version for checking
    :return: True if confirmation is needed, False if not
    :raise AmbienteNaoExisteError: Environment not registered.
    :raise InvalidParameterError: Invalid ID for VLAN.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    # FIX: id_environment_vlan was the only URL component not passed
    # through str(), so non-string IDs raised TypeError on concatenation.
    url = 'vlan/confirm/{0}/{1}/{2}'.format(number_net, id_environment_vlan, ip_version)
    code, xml = self.submit(None, 'GET', url)
    return self.response(code, xml)
def delete_webhook(self, ):
    """Use this method to remove webhook integration if you decide to switch back to getUpdates. Returns True on success. Requires no parameters.

    https://core.telegram.org/bots/api#deletewebhook

    Returns:
    :return: Returns True on success
    :rtype: bool
    """
    result = self.do("deleteWebhook", )
    if self.return_python_objects:
        logger.debug("Trying to parse {data}".format(data=repr(result)))
        try:
            # Expect a single primitive bool in the API response.
            return from_array_list(bool, result, list_level=0, is_builtin=True)
        except TgApiParseException:
            logger.debug("Failed parsing as primitive bool", exc_info=True)
        # end try
        # no valid parsing so far
        raise TgApiParseException("Could not parse result.")
        # See debug log for details!
    # end if return_python_objects
    # Raw mode: hand back the unparsed API result.
    return result
def rpc_method(func, doc=None, format='json', request_handler=None):
    '''A decorator which exposes a function ``func`` as an rpc function.

    :param func: The function to expose.
    :param doc: Optional doc string. If not provided the doc string of
        ``func`` will be used.
    :param format: Optional output format.
    :param request_handler: function which takes ``request``, ``format``
        and ``kwargs`` and return a new ``kwargs`` to be passed to ``func``.
        It can be used to add additional parameters based on request and
        format.
    '''
    def _(self, *args, **kwargs):
        # The first positional argument is the request object.
        request = args[0]
        if request_handler:
            kwargs = request_handler(request, format, kwargs)
        try:
            return func(*args, **kwargs)
        except TypeError:
            # Distinguish a caller-arity error from a TypeError raised
            # inside func: checkarity returns a message only when the
            # arguments do not match func's signature.
            msg = checkarity(func, args, kwargs)
            if msg:
                raise InvalidParams('Invalid Parameters. %s' % msg)
            else:
                raise
    _.__doc__ = doc or func.__doc__
    _.__name__ = func.__name__
    # Mark the wrapper so the rpc machinery can discover exposed methods.
    _.FromApi = True
    return _
def flip_video(self, is_flip, callback=None):
    '''Flip video.

    ``is_flip``: 0 Not flip, 1 Flip
    '''
    return self.execute_command('flipVideo', {'isFlip': is_flip}, callback=callback)
def find_by(self, column=None, value=None, order_by=None, limit=0):
    """Find all items that match a column/value.

    :param column: column to search.
    :param value: value to look for in `column`.
    :param limit: how many rows to fetch (0 means no limit).
    :param order_by: column on which to order the results. To change the
        sort, prepend with < or >.
    :raise ValueError: if either `column` or `value` is missing.
    """
    # FIX: validate before acquiring a connection — previously a
    # connection was opened just to raise on bad arguments.
    if column is None or value is None:
        raise ValueError("You need to supply both a column and a value")
    with rconnect() as conn:
        try:
            query = self._base()
            if order_by is not None:
                query = self._order_by(query, order_by)
            if limit > 0:
                query = self._limit(query, limit)
            query = query.filter({column: value})
            log.debug(query)
            rv = query.run(conn)
        except Exception as e:
            # Log and re-raise so callers see the original failure.
            log.warn(e)
            raise
        else:
            # Wrap each raw row in the model class.
            return [self._model(row) for row in rv]
def from_scene_coords(self, x=0, y=0):
    """Convert (x, y) given in scene coordinates to this sprite's local
    coordinates."""
    inverse = self.get_matrix()
    inverse.invert()
    return inverse.transform_point(x, y)
def init(ctx, client, directory, name, force, use_external_storage):
    """Initialize a project.

    Creates (or, with ``force``, re-initializes) a Renku repository in
    ``directory``, installs Git hooks and generates the template files.
    """
    # External storage can only be enabled if the client supports it.
    if not client.use_external_storage:
        use_external_storage = False
    ctx.obj = client = attr.evolve(client, path=directory, use_external_storage=use_external_storage, )
    msg = 'Initialized empty project in {path}'
    branch_name = None
    stack = contextlib.ExitStack()
    if force and client.repo:
        # Re-initialization: work on a dedicated branch and merge it back,
        # preferring our changes on conflicts ('-X ours').
        msg = 'Initialized project in {path} (branch {branch_name})'
        merge_args = ['--no-ff', '-s', 'recursive', '-X', 'ours']
        try:
            commit = client.find_previous_commit(str(client.renku_metadata_path), )
            branch_name = 'renku/init/' + str(commit)
        except KeyError:
            # No previous metadata commit: start from an empty tree.
            from git import NULL_TREE
            commit = NULL_TREE
            branch_name = 'renku/init/root'
            merge_args.append('--allow-unrelated-histories')
        ctx.obj = client = stack.enter_context(client.worktree(branch_name=branch_name, commit=commit, merge_args=merge_args, ))
    try:
        with client.lock:
            path = client.init_repository(name=name, force=force)
    except FileExistsError:
        raise click.UsageError('Renku repository is not empty. ' 'Please use --force flag to use the directory as Renku ' 'repository.')
    # Commit everything created below once the stack unwinds.
    stack.enter_context(client.commit())
    with stack:
        # Install Git hooks.
        from .githooks import install
        ctx.invoke(install, force=force)
        # Create all necessary template files.
        from .runner import template
        ctx.invoke(template, force=force)
    click.echo(msg.format(path=path, branch_name=branch_name))
def size_control_valve_g(T, MW, mu, gamma, Z, P1, P2, Q, D1=None, D2=None,
                         d=None, FL=0.9, Fd=1, xT=0.7, allow_choked=True,
                         allow_laminar=True, full_output=False):
    r'''Calculates flow coefficient of a control valve passing a gas
    according to IEC 60534. Uses a large number of inputs in SI units. Note
    the return value is not standard SI. All parameters are required. For
    details of the calculations, consult [1]_. Note the inlet gas flow
    conditions.

    Parameters
    ----------
    T : float
        Temperature of the gas at the inlet [K]
    MW : float
        Molecular weight of the gas [g/mol]
    mu : float
        Viscosity of the fluid at inlet conditions [Pa*s]
    gamma : float
        Specific heat capacity ratio [-]
    Z : float
        Compressibility factor at inlet conditions, [-]
    P1 : float
        Inlet pressure of the gas before valves and reducers [Pa]
    P2 : float
        Outlet pressure of the gas after valves and reducers [Pa]
    Q : float
        Volumetric flow rate of the gas at *273.15 K* and 1 atm specifically
        [m^3/s]
    D1 : float, optional
        Diameter of the pipe before the valve [m]
    D2 : float, optional
        Diameter of the pipe after the valve [m]
    d : float, optional
        Diameter of the valve [m]
    FL : float, optional
        Liquid pressure recovery factor of a control valve without attached
        fittings (normally 0.8-0.9 at full open and decreasing as opened
        further to below 0.5; use default very cautiously!) []
    Fd : float, optional
        Valve style modifier (0.1 to 1; varies tremendously depending on the
        type of valve and position; do not use the default at all!) []
    xT : float, optional
        Pressure difference ratio factor of a valve without fittings at
        choked flow (increasing to 0.9 or higher as the valve is closed
        further and decreasing to 0.1 or lower as the valve is opened
        further; use default very cautiously!) [-]
    allow_choked : bool, optional
        Overrides the automatic transition into the choked regime if this is
        False and returns as if choked flow does not exist
    allow_laminar : bool, optional
        Overrides the automatic transition into the laminar regime if this is
        False and returns as if laminar flow does not exist
    full_output : bool, optional
        If True, returns intermediate calculation values as well as Kv in the
        form of a dictionary containing 'Kv', 'Rev', 'choked', 'Y', 'FR',
        'FP', 'xTP', and 'laminar'. Some may be None if they are not used in
        the calculation.

    Returns
    -------
    Kv : float
        Metric Kv valve flow coefficient (flow rate of water at a pressure
        drop of 1 bar) [m^3/hr]

    Notes
    -----
    It is possible to use this model without any diameters specified; in that
    case, turbulent flow is assumed. Choked flow can still be modeled. This
    is not recommended. All three diameters need to be None for this to work.
    `FL` and `Fd` are not used by the models when the diameters are not
    specified, but `xT` definitely is used by the model.

    Examples
    --------
    From [1]_, matching example 3 for non-choked gas flow with attached
    fittings and a rotary, eccentric plug, flow-to-open control valve:

    >>> size_control_valve_g(T=433., MW=44.01, mu=1.4665E-4, gamma=1.30,
    ... Z=0.988, P1=680E3, P2=310E3, Q=38/36., D1=0.08, D2=0.1, d=0.05,
    ... FL=0.85, Fd=0.42, xT=0.60)
    72.58664545391052

    From [1]_, roughly matching example 4 for a small flow trim sized tapered
    needle plug valve. Difference is 3% and explained by the difference in
    algorithms used.

    >>> size_control_valve_g(T=320., MW=39.95, mu=5.625E-5, gamma=1.67, Z=1.0,
    ... P1=2.8E5, P2=1.3E5, Q=0.46/3600., D1=0.015, D2=0.015, d=0.015, FL=0.98,
    ... Fd=0.07, xT=0.8)
    0.016498765335995726

    References
    ----------
    .. [1] IEC 60534-2-1 / ISA-75.01.01-2007
    '''
    MAX_C_POSSIBLE = 1E40  # Quit iterations if C reaches this high
    # Pa to kPa, according to constants in standard
    P1, P2 = P1 / 1000., P2 / 1000.
    # m^3/s to m^3/hr, according to constants in standard
    Q = Q * 3600.
    # Convert dynamic viscosity to kinematic viscosity via molar volume and
    # density at inlet conditions (P1 back in Pa for the gas law).
    Vm = Z * R * T / (P1 * 1000)
    rho = (Vm) ** -1 * MW / 1000.
    nu = mu / rho  # kinematic viscosity used in standard
    dP = P1 - P2
    # Specific-heat ratio factor, normalized to air (gamma = 1.40).
    Fgamma = gamma / 1.40
    x = dP / P1  # pressure differential ratio
    # Expansion factor Y, floored at 2/3 per the standard.
    Y = max(1 - x / (3 * Fgamma * xT), 2 / 3.)
    choked = is_choked_turbulent_g(x, Fgamma, xT)
    if choked and allow_choked:
        # Choked, and flow coefficient from eq 14a
        C = Q / (N9 * P1 * Y) * (MW * T * Z / xT / Fgamma) ** 0.5
    else:
        # Non-choked, and flow coefficient from eq 8a
        C = Q / (N9 * P1 * Y) * (MW * T * Z / x) ** 0.5
    if full_output:
        ans = {'FP': None, 'xTP': None, 'FR': None, 'choked': choked, 'Y': Y}
    if D1 is None and D2 is None and d is None:
        # Assume turbulent if no diameters are provided, no other calculations
        Rev = 1e5
        if full_output:
            ans['Rev'] = None
    else:
        # m to mm, according to constants in standard
        D1, D2, d = D1 * 1000., D2 * 1000., d * 1000.
        Rev = Reynolds_valve(nu=nu, Q=Q, D1=D1, FL=FL, Fd=Fd, C=C)
        if full_output:
            ans['Rev'] = Rev
        if (Rev > 10000 or not allow_laminar) and (D1 != d or D2 != d):
            # Turbulent flow with attached fittings: gas, using xTP and FLP;
            # iterate C because the piping factors themselves depend on C.
            FP = 1.
            MAX_ITER = 20

            def iterate_piping_coef(Ci, iterations):
                # One fixed-point step: recompute piping geometry factor FP
                # and fitting-corrected xTP from the current coefficient Ci.
                loss = loss_coefficient_piping(d, D1, D2)
                FP = (1. + loss / N2 * (Ci / d ** 2) ** 2) ** -0.5
                loss_upstream = loss_coefficient_piping(d, D1)
                xTP = xT / FP ** 2 / (1 + xT * loss_upstream / N5 * (Ci / d ** 2) ** 2)
                choked = is_choked_turbulent_g(x, Fgamma, xTP=xTP)
                if choked:
                    # Choked flow with piping, equation 17a
                    C = Q / (N9 * FP * P1 * Y) * (MW * T * Z / xTP / Fgamma) ** 0.5
                else:
                    # Non-choked flow with piping, equation 11a
                    C = Q / (N9 * FP * P1 * Y) * (MW * T * Z / x) ** 0.5
                # Recurse until C stops growing by >1%, or limits are hit.
                if Ci / C < 0.99 and iterations < MAX_ITER and Ci < MAX_C_POSSIBLE:
                    C = iterate_piping_coef(C, iterations + 1)
                if full_output:
                    ans['xTP'] = xTP
                    ans['FP'] = FP
                    ans['choked'] = choked
                    if MAX_ITER == iterations or Ci >= MAX_C_POSSIBLE:
                        ans['warning'] = 'Not converged in inner loop'
                return C
            C = iterate_piping_coef(C, 0)
        elif Rev <= 10000 and allow_laminar:
            # Laminar (non-turbulent) regime: apply the Reynolds-number
            # correction factor FR, iterating on a 1.3x inflated guess.
            def iterate_piping_laminar(C):
                Ci = 1.3 * C
                Rev = Reynolds_valve(nu=nu, Q=Q, D1=D1, FL=FL, Fd=Fd, C=Ci)
                # Trim-style selection per the standard's size criterion.
                if Ci / d ** 2 > 0.016 * N18:
                    FR = Reynolds_factor(FL=FL, C=Ci, d=d, Rev=Rev, full_trim=False)
                else:
                    FR = Reynolds_factor(FL=FL, C=Ci, d=d, Rev=Rev, full_trim=True)
                if C / FR >= Ci:
                    Ci = iterate_piping_laminar(Ci)
                if full_output:
                    ans['FR'] = FR
                    ans['Rev'] = Rev
                return Ci
            C = iterate_piping_laminar(C)
    if full_output:
        ans['Kv'] = C
        ans['laminar'] = Rev <= 10000
        return ans
    else:
        return C
def libvlc_vlm_get_media_instance_time(p_instance, psz_name, i_instance):
    '''Get vlm_media instance time by name or instance id.
    @param p_instance: a libvlc instance.
    @param psz_name: name of vlm media instance.
    @param i_instance: instance id.
    @return: time as integer or -1 on error.
    '''
    # Reuse the cached ctypes binding when present, otherwise build it.
    func = _Cfunctions.get('libvlc_vlm_get_media_instance_time', None) or \
        _Cfunction('libvlc_vlm_get_media_instance_time',
                   ((1,), (1,), (1,),), None,
                   ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_int)
    return func(p_instance, psz_name, i_instance)
def _set_current_options ( self , options ) :
"""Set current options for a model .
Parameters
options : dict
A dictionary of the desired option settings . The key should be the name
of the option and each value is the desired value of the option .""" | opts = self . _get_current_options ( )
opts . update ( options )
response = self . __proxy__ . set_current_options ( opts )
return response |
def skew(self, x=0, y=0):
    """Skew the element by x and y degrees.

    Convenience function which calls skew_x and skew_y.

    Parameters
    ----------
    x, y : float
        Skew angle in degrees (default 0). If an angle is given as zero
        degrees, that transformation is omitted.

    Returns
    -------
    The element itself, allowing call chaining.
    """
    # FIX: the original used ``x is not 0`` — an identity (not equality)
    # test. It is unreliable for ints (interning-dependent) and always True
    # for 0.0, so a zero float angle was NOT omitted as documented.
    if x != 0:
        self.skew_x(x)
    if y != 0:
        self.skew_y(y)
    return self
def interactive_imshow(img, lclick_cb=None, rclick_cb=None, **kwargs):
    """Show an image in an OpenCV window and dispatch mouse/key events.

    Args:
        img (np.ndarray): an image (expect BGR) to show.
        lclick_cb, rclick_cb: a callback ``func(img, x, y)`` for left/right
            click event.
        kwargs: can be {key_cb_a: callback_img, key_cb_b: callback_img}, to
            specify a callback ``func(img)`` for keypress.

    Some existing keypress event handlers:

    * q: destroy the current window
    * x: execute ``sys.exit()``
    * s: save image to "out.png"
    """
    name = 'tensorpack_viz_window'
    cv2.imshow(name, img)

    def mouse_cb(event, x, y, *args):
        # Only button-release events are dispatched, and only when the
        # corresponding callback was supplied.
        if event == cv2.EVENT_LBUTTONUP and lclick_cb is not None:
            lclick_cb(img, x, y)
        elif event == cv2.EVENT_RBUTTONUP and rclick_cb is not None:
            rclick_cb(img, x, y)
    cv2.setMouseCallback(name, mouse_cb)
    # Block until a keypress; ignore codes >= 128 (non-ASCII — presumably
    # modifier/special keys; TODO confirm across platforms).
    key = cv2.waitKey(-1)
    while key >= 128:
        key = cv2.waitKey(-1)
    key = chr(key & 0xff)
    # User-supplied per-key callbacks take precedence over the built-ins.
    cb_name = 'key_cb_' + key
    if cb_name in kwargs:
        kwargs[cb_name](img)
    elif key == 'q':
        cv2.destroyWindow(name)
    elif key == 'x':
        sys.exit()
    elif key == 's':
        cv2.imwrite('out.png', img)
    elif key in ['+', '=']:
        # Zoom in 1.3x and re-enter the event loop recursively.
        img = cv2.resize(img, None, fx=1.3, fy=1.3, interpolation=cv2.INTER_CUBIC)
        interactive_imshow(img, lclick_cb, rclick_cb, **kwargs)
    elif key == '-':
        # Zoom out 0.7x and re-enter the event loop recursively.
        img = cv2.resize(img, None, fx=0.7, fy=0.7, interpolation=cv2.INTER_CUBIC)
        interactive_imshow(img, lclick_cb, rclick_cb, **kwargs)
def guess_bonds(r_array, type_array, threshold=0.1, maxradius=0.3, radii_dict=None):
    '''Detect bonds given the coordinates (*r_array*) and types of the atoms
    involved (*type_array*), based on their covalent radii.

    Two atoms i, j are considered bonded when their distance falls within
    ``(r_i + r_j) +/- threshold``, searching only candidate pairs closer
    than **maxradius**. The covalent-radius lookup can be overridden via
    **radii_dict** (mapping atom type -> radius).

    Returns an (N, 2) integer array of bonded index pairs.
    '''
    if radii_dict is None:
        # Fall back to the bundled covalent-radius table.
        covalent_radii = cdb.get('data', 'covalentdict')
    else:
        covalent_radii = radii_dict
    # Find all candidate pairs within the search radius (KD-tree).
    ck = cKDTree(r_array)
    pairs = ck.query_pairs(maxradius)
    bonds = []
    for i, j in pairs:
        expected = covalent_radii[type_array[i]] + covalent_radii[type_array[j]]
        # FIX: clamp the lower bound at zero. Previously, when threshold
        # exceeded the radii sum, (expected - threshold) was negative and its
        # square became a spurious *positive* lower bound, wrongly rejecting
        # genuinely close pairs.
        lo = max(expected - threshold, 0.0)
        hi = expected + threshold
        # Compare squared distances to avoid a sqrt per pair.
        dr2 = ((r_array[i] - r_array[j]) ** 2).sum()
        if lo ** 2 < dr2 < hi ** 2:
            bonds.append((i, j))
    return np.array(bonds)
def diff(self, other, **kwargs):
    """Compare an object with another and return a :py:class:`DiffInfo`
    object. Accepts the same arguments as
    :py:meth:`normalize.record.Record.diff_iter`
    """
    # Import lazily and alias to avoid clashing with this method's name.
    from normalize.diff import diff as diff_impl
    return diff_impl(self, other, **kwargs)
def drawSector(page, center, point, beta, color=None, fill=None, dashes=None,
               fullSector=True, morph=None, width=1, closePath=False,
               roundCap=False, overlay=True):
    """Draw a circle sector on a page, given the circle center, one arc end
    point and the angle of the arc.

    Parameters:
        center -- center of circle
        point -- arc end point
        beta -- angle of arc (degrees)
        fullSector -- connect arc ends with center
    """
    shape = page.newShape()
    result = shape.drawSector(Point(center), Point(point), beta,
                              fullSector=fullSector)
    shape.finish(color=color, fill=fill, dashes=dashes, width=width,
                 roundCap=roundCap, morph=morph, closePath=closePath)
    shape.commit(overlay)
    return result
def _calibrate(self, data):
    """Visible/IR channel calibration.

    Maps raw counts to calibrated values through the per-channel
    look-up table found in the file prologue.
    """
    # Calibration LUT for this channel, read from the prologue.
    lut = self.prologue['ImageCalibration'][self.chid]
    # 16777216 == 2**24, the largest integer float32 represents exactly;
    # widen to float64 only when the LUT values would lose precision.
    if abs(lut).max() > 16777216:
        lut = lut.astype(np.float64)
    else:
        lut = lut.astype(np.float32)
    # Scale LUT values (stored as thousandths) and mark entry 0 as invalid.
    lut /= 1000
    lut[0] = np.nan
    # Dask/XArray don't support indexing in 2D (yet).
    res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype)
    res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords)
    # Counts <= 0 are treated as missing data.
    res = res.where(data > 0)
    return res
def process_request(self, request):
    """Process the web-based auth request.

    Resolves a Facebook OAuth token from either a cookie or an OAuth
    ``code`` query parameter, loads or creates the corresponding User,
    and attaches ``request.facebook`` (or sets it to False when the
    request cannot be authenticated).
    """
    # User has already been authed by alternate middleware
    if hasattr(request, "facebook") and request.facebook:
        return
    request.facebook = False
    if not self.is_valid_path(request):
        return
    if self.is_access_denied(request):
        return authorization_denied_view(request)
    request.facebook = Facebook()
    oauth_token = False
    # Is there a token cookie already present?
    if 'oauth_token' in request.COOKIES:
        try:
            # Check if the current token is already in DB
            oauth_token = OAuthToken.objects.get(token=request.COOKIES['oauth_token'])
        except OAuthToken.DoesNotExist:
            request.facebook = False
            return
    # Is there a code in the GET request?
    elif 'code' in request.GET:
        try:
            graph = GraphAPI()
            # Exchange code for an access_token
            response = graph.get('oauth/access_token', client_id=FACEBOOK_APPLICATION_ID, redirect_uri=get_post_authorization_redirect_url(request, canvas=False), client_secret=FACEBOOK_APPLICATION_SECRET_KEY, code=request.GET['code'], )
            components = parse_qs(response)
            # Save new OAuth-token in DB
            oauth_token, new_oauth_token = OAuthToken.objects.get_or_create(token=components['access_token'][0], issued_at=now(), expires_at=now() + timedelta(seconds=int(components['expires'][0])))
        except GraphAPI.OAuthError:
            pass
    # There isn't a valid access_token
    if not oauth_token or oauth_token.expired:
        request.facebook = False
        return
    # Is there a user already connected to the current token?
    try:
        user = oauth_token.user
        if not user.authorized:
            request.facebook = False
            return
        user.last_seen_at = now()
        user.save()
    except User.DoesNotExist:
        graph = GraphAPI(oauth_token.token)
        profile = graph.get('me')
        # Either the user already exists and it's just a new token, or user
        # and token both are new.
        try:
            user = User.objects.get(facebook_id=profile.get('id'))
            if not user.authorized:
                # NOTE(review): ``new_oauth_token`` is only bound in the
                # 'code' branch above; if the token came from the cookie
                # branch this line raises NameError — confirm and fix.
                if new_oauth_token:
                    user.last_seen_at = now()
                    user.authorized = True
                else:
                    request.facebook = False
                    return
        except User.DoesNotExist:
            # Create a new user to go with token
            user = User.objects.create(facebook_id=profile.get('id'), oauth_token=oauth_token)
        user.synchronize(profile)
        # Delete old access token if there is any and only if the new one is
        # different.
        old_oauth_token = None
        if user.oauth_token != oauth_token:
            old_oauth_token = user.oauth_token
            user.oauth_token = oauth_token
        user.save()
        if old_oauth_token:
            old_oauth_token.delete()
    if not user.oauth_token.extended:
        # Attempt to extend the OAuth token, but ignore exceptions raised by
        # bug #102727766518358 in the Facebook Platform.
        # http://developers.facebook.com/bugs/102727766518358/
        try:
            user.oauth_token.extend()
        except:
            pass
    request.facebook.user = user
    request.facebook.oauth_token = oauth_token
def serialize(self, **kwargs) -> str:
    """rdflib.Graph().serialize wrapper.

    The original serialize cannot handle PosixPath from pathlib, so every
    keyword value is coerced to ``str`` before delegation. You should pass
    only ``destination`` and ``format``; ``format`` is a must, and without a
    ``destination`` the formatted graph is returned as a str.

    Args:
        destination: Output file path.
        format: serialization format: 'xml', 'n3', 'turtle', 'nt',
            'pretty-xml', 'trix', 'trig' and 'nquads' are built in;
            json-ld via rdflib_jsonld.
        base: None
        encoding: None
        **args: None
    """
    coerced = {name: str(value) for name, value in kwargs.items()}
    return self.g.serialize(**coerced)
def insert_table(self, rows, cols):
    """Return a |PlaceholderGraphicFrame| object containing a table of
    *rows* rows and *cols* columns.

    The position and width of the table are those of the placeholder and
    its height is proportional to the number of rows. A
    |PlaceholderGraphicFrame| object has all the properties and methods of
    a |GraphicFrame| shape except that the value of its
    :attr:`~._BaseSlidePlaceholder.shape_type` property is unconditionally
    `MSO_SHAPE_TYPE.PLACEHOLDER`. Note that the return value is not the new
    table but rather *contains* the new table. The table can be accessed
    using the :attr:`~.PlaceholderGraphicFrame.table` property of the
    returned |PlaceholderGraphicFrame| object.
    """
    frame_element = self._new_placeholder_table(rows, cols)
    self._replace_placeholder_with(frame_element)
    return PlaceholderGraphicFrame(frame_element, self._parent)
def histogram(self, column='r', filename=None, log10=False, **kwargs):
    """Plot a histogram of one data column.

    Optionally saves the combined figure to ``filename`` (300 dpi) and
    returns the dictionary of produced figures.

    NOTE(review): ``log10`` and ``**kwargs`` are accepted but never used —
    confirm whether they were meant to be forwarded.
    """
    figures = HS.plot_histograms(self.data, column)
    if filename is not None:
        figures['all'].savefig(filename, dpi=300)
    return figures
def uncomment(path, regex, char='#', backup='.bak'):
    '''.. deprecated:: 0.17.0
       Use :py:func:`~salt.modules.file.replace` instead.

    Uncomment specified commented lines in a file.

    path
        The full path to the file to be edited
    regex
        A regular expression used to find the lines that are to be
        uncommented. This regex should not include the comment character.
        A leading ``^`` character will be stripped for convenience (for
        easily switching between comment() and uncomment()).
    char : ``#``
        The character to remove in order to uncomment a line
    backup : ``.bak``
        The file will be backed up before edit with this file extension;
        **WARNING:** each time ``sed``/``comment``/``uncomment`` is called
        will overwrite this backup

    CLI Example:

    .. code-block:: bash

        salt '*' file.uncomment /etc/hosts.deny 'ALL: PARANOID'
    '''
    # Delegate to comment_line with cmnt=False, which removes the comment
    # character instead of adding it.
    call_kwargs = {'path': path, 'regex': regex, 'char': char,
                   'cmnt': False, 'backup': backup}
    return comment_line(**call_kwargs)
def update(self, source_file, configuration, declarations, included_files):
    """Replace a cache entry by a new value.

    :param source_file: a C++ source file name.
    :type source_file: str
    :param configuration: configuration object.
    :type configuration: :class:`xml_generator_configuration_t`
    :param declarations: declarations contained in the `source_file`
    :type declarations: pickable object
    :param included_files: included files
    :type included_files: list of str
    """
    # Normalize all paths...
    source_file = os.path.normpath(source_file)
    included_files = [os.path.normpath(p) for p in included_files]
    # Dependent files = the source file + its includes, with duplicates
    # removed while preserving first-seen order.
    dependent_files = dict.fromkeys([source_file] + included_files, 1)
    key = self._create_cache_key(source_file)
    # Remove an existing entry (if there is one). After calling this method,
    # it is guaranteed that __index[key] does not exist anymore.
    self._remove_entry(source_file, key)
    # Create the signatures of all dependent files...
    filesigs = []
    for filename in dependent_files:
        id_, sig = self.__filename_rep.acquire_filename(filename)
        filesigs.append((id_, sig))
    # Create a new entry and mark the index dirty.
    configsig = self._create_config_signature(configuration)
    self.__index[key] = index_entry_t(filesigs, configsig)
    self.__modified_flag = True
    # Write the declarations into the cache file...
    cachefilename = self._create_cache_filename(source_file)
    self._write_file(cachefilename, declarations)
def _from_json_list(cls, response_raw, wrapper=None):
    """Deserialize a raw list response into a paginated BunqResponse.

    :type response_raw: client.BunqResponseRaw
    :type wrapper: str|None

    :rtype: client.BunqResponse[list[cls]]
    """
    body = response_raw.body_bytes.decode()
    parsed = converter.json_to_class(dict, body)
    # Each item may be nested under an extra wrapper key.
    items = []
    for entry in parsed[cls._FIELD_RESPONSE]:
        payload = entry[wrapper] if wrapper is not None else entry
        items.append(converter.deserialize(cls, payload))
    pagination = converter.deserialize(client.Pagination, parsed[cls._FIELD_PAGINATION])
    return client.BunqResponse(items, response_raw.headers, pagination)
def CEN_calc(classes, table, TOP, P, class_name, modified=False):
    """Calculate CEN (Confusion Entropy) / MCEN (Modified Confusion Entropy).

    :param classes: classes
    :type classes: list
    :param table: input matrix
    :type table: dict
    :param TOP: test outcome positive
    :type TOP: int
    :param P: condition positive
    :type P: int
    :param class_name: reviewed class name
    :type class_name: any valid type
    :param modified: modified mode flag
    :type modified: bool
    :return: CEN(MCEN) as float
    """
    try:
        # Entropy terms use log base 2*(|classes| - 1).
        log_base = 2 * (len(classes) - 1)
        entropy = 0
        for other in classes:
            if other == class_name:
                continue
            # Misclassification probabilities in both directions.
            P_j_k = CEN_misclassification_calc(table, TOP, P, class_name, other, class_name, modified)
            P_k_j = CEN_misclassification_calc(table, TOP, P, other, class_name, class_name, modified)
            if P_j_k != 0:
                entropy += P_j_k * math.log(P_j_k, log_base)
            if P_k_j != 0:
                entropy += P_k_j * math.log(P_k_j, log_base)
        # Entropy is the negated sum (kept 0 when no terms contributed).
        if entropy != 0:
            entropy = -entropy
        return entropy
    except Exception:
        return "None"
def vis_aggregate_groups(Verts, E2V, Agg, mesh_type, output='vtk', fname='output.vtu'):
    """Coarse grid visualization of aggregate groups.

    Create .vtu files for use in Paraview or display with Matplotlib.

    Parameters
    ----------
    Verts : {array}
        coordinate array (N x D)
    E2V : {array}
        element index array (Nel x Nelnodes)
    Agg : {csr_matrix}
        sparse matrix for the aggregate-vertex relationship (N x Nagg)
    mesh_type : {string}
        type of elements: vertex, tri, quad, tet, hex (all 3d)
    fname : {string, file object}
        file to be written, e.g. 'output.vtu'
    output : {string}
        'vtk' or 'matplotlib'

    Returns
    -------
    - Writes data to .vtu file for use in paraview (xml 0.1 format) or
      displays to screen using matplotlib

    Notes
    -----
    - Works for both 2d and 3d elements. Element groupings are colored
      with data equal to 2.0 and stringy edges in the aggregate are colored
      with 3.0

    Examples
    --------
    >>> from pyamg.aggregation import standard_aggregation
    >>> from pyamg.vis.vis_coarse import vis_aggregate_groups
    >>> from pyamg.gallery import load_example
    >>> data = load_example('unit_square')
    >>> A = data['A'].tocsr()
    >>> V = data['vertices']
    >>> E2V = data['elements']
    >>> Agg = standard_aggregation(A)[0]
    >>> vis_aggregate_groups(Verts=V, E2V=E2V, Agg=Agg, mesh_type='tri',
    ...                      output='vtk', fname='output.vtu')

    >>> from pyamg.aggregation import standard_aggregation
    >>> from pyamg.vis.vis_coarse import vis_aggregate_groups
    >>> from pyamg.gallery import load_example
    >>> data = load_example('unit_cube')
    >>> A = data['A'].tocsr()
    >>> V = data['vertices']
    >>> E2V = data['elements']
    >>> Agg = standard_aggregation(A)[0]
    >>> vis_aggregate_groups(Verts=V, E2V=E2V, Agg=Agg, mesh_type='tet',
    ...                      output='vtk', fname='output.vtu')
    """
    check_input(Verts=Verts, E2V=E2V, Agg=Agg, mesh_type=mesh_type)
    # VTK cell-type codes for each supported element type.
    map_type_to_key = {'tri': 5, 'quad': 9, 'tet': 10, 'hex': 12}
    if mesh_type not in map_type_to_key:
        raise ValueError('unknown mesh_type=%s' % mesh_type)
    key = map_type_to_key[mesh_type]
    Agg = csr_matrix(Agg)
    # remove elements with dirichlet BCs
    if E2V.max() >= Agg.shape[0]:
        E2V = E2V[E2V.max(axis=1) < Agg.shape[0]]
    # Find elements with all vertices in same aggregate.
    # Account for 0 rows. Mark them as solitary aggregates.
    # TODO: (Luke) full_aggs is not defined, I think it's just a mask
    # indicating which rows are not 0.
    # NOTE(review): the two indexed assignments below compare the *indices*
    # of empty rows against 0/1 rather than using a boolean mask — verify
    # against upstream pyamg before relying on this branch.
    if len(Agg.indices) != Agg.shape[0]:
        full_aggs = ((Agg.indptr[1:] - Agg.indptr[:-1]) == 0).nonzero()[0]
        new_aggs = np.array(Agg.sum(axis=1), dtype=int).ravel()
        new_aggs[full_aggs == 1] = Agg.indices
        # keep existing aggregate IDs
        new_aggs[full_aggs == 0] = Agg.shape[1]
        # fill in singletons maxID+1
        ElementAggs = new_aggs[E2V]
    else:
        ElementAggs = Agg.indices[E2V]
    # find all aggregates encompassing full elements
    # mask[i] == True if all vertices in element i belong to the same aggregate
    mask = np.where(abs(np.diff(ElementAggs)).max(axis=1) == 0)[0]
    # mask = (ElementAggs[:, :] == ElementAggs[:, 0]).all(axis=1)
    E2V_a = E2V[mask, :]
    # elements where element is full
    Nel_a = E2V_a.shape[0]
    # find edges of elements in the same aggregate (brute force)
    # construct vertex-to-vertex graph
    col = E2V.ravel()
    row = np.kron(np.arange(0, E2V.shape[0]), np.ones((E2V.shape[1],), dtype=int))
    data = np.ones((len(col),))
    if len(row) != len(col):
        raise ValueError('Problem constructing vertex-to-vertex map')
    V2V = coo_matrix((data, (row, col)), shape=(E2V.shape[0], E2V.max() + 1))
    V2V = V2V.T * V2V
    # keep the strict upper triangle so each edge appears once
    V2V = triu(V2V, 1).tocoo()
    # get all the edges
    edges = np.vstack((V2V.row, V2V.col)).T
    # all the edges in the same aggregate
    E2V_b = edges[Agg.indices[V2V.row] == Agg.indices[V2V.col]]
    Nel_b = E2V_b.shape[0]
    # single node aggregates
    sums = np.array(Agg.sum(axis=0)).ravel()
    E2V_c = np.where(sums == 1)[0]
    Nel_c = len(E2V_c)
    # now write out the elements and edges
    colors_a = 3 * np.ones((Nel_a,))
    # color triangles with threes
    colors_b = 2 * np.ones((Nel_b,))
    # color edges with twos
    colors_c = 1 * np.ones((Nel_c,))
    # color the vertices with ones
    Cells = {1: E2V_c, 3: E2V_b, key: E2V_a}
    cdata = {1: colors_c, 3: colors_b, key: colors_a}
    write_vtu(Verts=Verts, Cells=Cells, fname=fname, cdata=cdata)
def confirm_phone_number(self, sms_code):
    """Confirm phone number with the received SMS code.

    :param sms_code: sms code
    :type sms_code: :class:`str`
    :return: success (returns ``False`` on request fail/timeout)
    :rtype: :class:`bool`
    """
    sess = self._get_web_session()
    try:
        resp = sess.post('https://steamcommunity.com/steamguard/phoneajax',
                         data={
                             'op': 'check_sms_code',
                             'arg': sms_code,
                             'checkfortos': 1,
                             'skipvoip': 1,
                             'sessionid': sess.cookies.get('sessionid', domain='steamcommunity.com'),
                         },
                         timeout=15).json()
    # FIX: the original bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; only real errors (network failure, timeout, bad
    # JSON) should map to the documented ``False`` result.
    except Exception:
        return False
    return (resp or {}).get('success', False)
def parse_san(self, san: str) -> Move:
    """Uses the current position as the context to parse a move in standard
    algebraic notation and returns the corresponding move object.

    The returned move is guaranteed to be either legal or a null move.

    :raises: :exc:`ValueError` if the SAN is invalid or ambiguous.
    """
    # Castling (with optional check/checkmate suffix).
    try:
        if san in ["O-O", "O-O+", "O-O#"]:
            return next(move for move in self.generate_castling_moves() if self.is_kingside_castling(move))
        elif san in ["O-O-O", "O-O-O+", "O-O-O#"]:
            return next(move for move in self.generate_castling_moves() if self.is_queenside_castling(move))
    except StopIteration:
        # No such castling move is available in this position.
        raise ValueError("illegal san: {!r} in {}".format(san, self.fen()))
    # Match normal moves.
    match = SAN_REGEX.match(san)
    if not match:
        # Null moves.
        if san in ["--", "Z0"]:
            return Move.null()
        raise ValueError("invalid san: {!r}".format(san))
    # Get target square; exclude squares occupied by the side to move.
    to_square = SQUARE_NAMES.index(match.group(4))
    to_mask = BB_SQUARES[to_square] & ~self.occupied_co[self.turn]
    # Get the promotion type (last character of group 5, if present).
    p = match.group(5)
    promotion = p and PIECE_SYMBOLS.index(p[-1].lower())
    # Filter by piece type; a missing piece letter denotes a pawn move.
    if match.group(1):
        piece_type = PIECE_SYMBOLS.index(match.group(1).lower())
        from_mask = self.pieces_mask(piece_type, self.turn)
    else:
        from_mask = self.pawns
    # Filter by source file disambiguator, if given.
    if match.group(2):
        from_mask &= BB_FILES[FILE_NAMES.index(match.group(2))]
    # Filter by source rank disambiguator, if given.
    if match.group(3):
        from_mask &= BB_RANKS[int(match.group(3)) - 1]
    # Match legal moves: exactly one candidate must survive the filters.
    matched_move = None
    for move in self.generate_legal_moves(from_mask, to_mask):
        if move.promotion != promotion:
            continue
        if matched_move:
            raise ValueError("ambiguous san: {!r} in {}".format(san, self.fen()))
        matched_move = move
    if not matched_move:
        raise ValueError("illegal san: {!r} in {}".format(san, self.fen()))
    return matched_move
def walletpassphrase(self, passphrase, timeout=99999999, mint_only=True):
    """Unlock the wallet (for minting) via the ``walletpassphrase`` RPC call."""
    params = [passphrase, timeout, mint_only]
    return self.req("walletpassphrase", params)
def TP3(x, u, jac=False):
    '''Demo problem 1 for horsetail matching; takes two input values of
    size 1. Returns the response q, or ``(q, dq/dx)`` when *jac* is True.'''
    response = 2 + 0.5 * x + 1.5 * (1 - x) * u
    if jac:
        gradient = 0.5 - 1.5 * u
        return response, gradient
    return response
def nsUriMatch(self, value, wanted, strict=0, tt=type(())):
    """Return a true value if two namespace uri values match.

    *wanted* may be a single URI or a tuple of acceptable URIs. Unless
    *strict* is set, a trailing '/' on either side is ignored.
    """
    # Exact match (or membership when a tuple of URIs was supplied).
    if value == wanted or (type(wanted) is tt and value in wanted):
        return 1
    if not strict and value is not None:
        candidates = wanted if type(wanted) is tt else (wanted,)
        # Normalize by dropping a trailing slash from the value.
        normalized = value[:-1] if value[-1:] == '/' else value
        for candidate in candidates:
            if candidate == normalized or candidate[:-1] == normalized:
                return 1
    return 0
def run_vardict(align_bams, items, ref_file, assoc_files, region=None, out_file=None):
    """Run VarDict variant calling.

    Dispatches to the paired (tumor/normal) or single-sample caller based
    on the inputs, after applying the high-depth genome exclusion.
    """
    items = shared.add_highdepth_genome_exclusion(items)
    if not vcfutils.is_paired_analysis(align_bams, items):
        vcfutils.check_paired_problems(items)
        return _run_vardict_caller(align_bams, items, ref_file, assoc_files,
                                   region, out_file)
    return _run_vardict_paired(align_bams, items, ref_file, assoc_files,
                               region, out_file)
def update_express(self, template_id, delivery_template):
    """Update a postage (delivery fee) template.

    :param template_id: ID of the postage template to update
    :param delivery_template: postage template payload (same fields as
        when adding a postage template)
    :return: parsed JSON response
    """
    # FIX: the original mutated the caller's ``delivery_template`` dict by
    # injecting ``template_id`` into it; work on a shallow copy instead.
    payload = dict(delivery_template)
    payload['template_id'] = template_id
    return self._post('merchant/express/update', data=payload)
def on_person_update(self, people):
    """People have changed (e.g. a sensor value).

    :param people: People whose state changed (may include unchanged)
    :type people: list[paps.person.Person]
    :rtype: None
    """
    self.debug("()")
    changed = []
    # Update the internal roster under the lock before notifying plugins.
    with self._people_lock:
        for p in people:
            person = Person.from_person(p)
            if person.id not in self._people:
                self.warning(u"{} not in audience".format(person.id))
            self._people[person.id] = person
            # Check if really changed? - trust source for now
            changed.append(person)
    # Fan the update out to every plugin; one failing plugin must not
    # prevent the others from being notified.
    for plugin in self.plugins:
        try:
            plugin.on_person_update(changed)
        # NOTE(review): bare ``except:`` also traps SystemExit and
        # KeyboardInterrupt — consider narrowing to ``except Exception``.
        except:
            self.exception(u"Failed to send updated people to {}".format(plugin.name))
def parse(argv, rules=None, config=None, **kwargs):
    """Parse the given arg vector with the default Splunk command rules."""
    cli_parser = parser(rules, **kwargs)
    if config is not None:
        # Load defaults from the config file before parsing argv.
        cli_parser.loadrc(config)
    return cli_parser.parse(argv).result
def sort(self, col: str):
    """Sort the main dataframe according to the given column.

    :param col: column name
    :type col: str
    :example: ``ds.sort("Col 1")``
    """
    try:
        ordered = self.df.copy().sort_values(col)
        self.df = ordered
    except Exception as e:
        # Report through the instance's error channel instead of raising.
        self.err(e, "Can not sort the dataframe from column " + str(col))
def result_worker(ctx, result_cls, get_object=False):
    """Run result worker.

    Instantiates the configured ResultWorker class, registers it on the
    context, and either returns it (testing mode or ``get_object``) or
    runs it in place.
    """
    g = ctx.obj
    worker_cls = load_cls(None, None, result_cls)
    worker = worker_cls(resultdb=g.resultdb, inqueue=g.processor2result)
    g.instances.append(worker)
    if g.get('testing_mode') or get_object:
        return worker
    worker.run()
def configure_lease(self, lease, lease_max, mount_point=DEFAULT_MOUNT_POINT):
    """Configure lease settings for the AWS secrets engine.

    Optional: the server keeps defaults for both lease values.

    Supported methods:
        POST: /{mount_point}/config/lease. Produces: 204 (empty body)

    :param lease: Specifies the lease value provided as a string duration
        with time suffix. "h" (hour) is the largest suffix.
    :type lease: str | unicode
    :param lease_max: Specifies the maximum lease value provided as a string
        duration with time suffix. "h" (hour) is the largest suffix.
    :type lease_max: str | unicode
    :param mount_point: The "path" the method/backend was mounted on.
    :type mount_point: str | unicode
    :return: The response of the request.
    :rtype: requests.Response
    """
    api_path = '/v1/{mount_point}/config/lease'.format(mount_point=mount_point)
    payload = {
        'lease': lease,
        'lease_max': lease_max,
    }
    return self._adapter.post(url=api_path, json=payload)
def get_file_content(self, path):
    """Return the content of the file at the given ``path``.

    :param path: repository-relative path of the file
    :return: the blob content as a pretty string
    """
    # Renamed local from ``id`` to ``blob_id`` to avoid shadowing the builtin.
    blob_id = self._get_id_for_path(path)
    blob = self.repository._repo[blob_id]
    return blob.as_pretty_string()
def around_me(self, member, **options):
    '''Retrieve a page of leaders centred on a given member of this leaderboard.

    @param member [String] Member name.
    @param options [Hash] Options to be used when retrieving the page from the leaderboard.
    @return a page of leaders from the leaderboard around a given member.'''
    board = self.leaderboard_name
    return self.around_me_in(board, member, **options)
def wrap_results_for_axis(self):
    """Return the row-wise apply results wrapped in the constructor type.

    Builds a frame-like object from ``self.results`` and, when the
    individual results are not Series, re-attaches the cached axes.
    The ``ValueError`` guards cover length mismatches between the
    results and the cached index/columns (e.g. when the applied function
    returned irregularly shaped output).
    """
    results = self.results
    # Construct the full result object from the per-row results.
    result = self.obj._constructor(data=results)
    if not isinstance(results[0], ABCSeries):
        try:
            result.index = self.res_columns
        except ValueError:
            # Length mismatch: keep the default index.
            pass
        try:
            result.columns = self.res_index
        except ValueError:
            # Length mismatch: keep the default columns.
            pass
    return result
def get_attribute_compound(attribute, value=None, splitter="|", binding_identifier="@"):
    """Returns an attribute compound.

    Usage::

        >>> data = "@Link | Value | Boolean | Link Parameter"
        >>> attribute_compound = foundations.parsers.get_attribute_compound("Attribute Compound", data)
        >>> attribute_compound.name
        u'Attribute Compound'
        >>> attribute_compound.value
        u'Value'
        >>> attribute_compound.link
        u'@Link'
        >>> attribute_compound.type
        u'Boolean'
        >>> attribute_compound.alias
        u'Link Parameter'

    :param attribute: Attribute.
    :type attribute: unicode
    :param value: Attribute value.
    :type value: object
    :param splitter: Splitter.
    :type splitter: unicode
    :param binding_identifier: Binding identifier.
    :type binding_identifier: unicode
    :return: Attribute compound.
    :rtype: AttributeCompound
    """
    LOGGER.debug("> Attribute: '{0}', value: '{1}'.".format(attribute, value))
    # ``isinstance`` replaces ``type(value) is unicode`` so unicode
    # subclasses are handled too.
    if isinstance(value, unicode):
        if splitter in value:
            value_tokens = value.split(splitter)
            if len(value_tokens) >= 3 and re.search(r"{0}\w*".format(binding_identifier), value_tokens[0]):
                # BUGFIX: a real conditional expression replaces the old
                # ``len(...) == 4 and x or None`` idiom, which wrongly
                # collapsed an empty fourth token to ``None``.
                alias = value_tokens[3].strip() if len(value_tokens) == 4 else None
                return AttributeCompound(name=attribute,
                                         value=value_tokens[1].strip(),
                                         link=value_tokens[0].strip(),
                                         type=value_tokens[2].strip(),
                                         alias=alias)
        else:
            if re.search(r"{0}\w*".format(binding_identifier), value):
                return AttributeCompound(name=attribute, value=None, link=value, type=None, alias=None)
    return AttributeCompound(name=attribute, value=value, link=None, type=None, alias=None)
def remove(self, dic):
    '''Remove pairs identified by an identical dict.

    Args:
        dic (dict): keys and values of the pairs to drop'''
    for key, val in dic.items():
        self._remove([Pair(key, val)])
def db_snapshot_append(cls, cur, block_id, consensus_hash, ops_hash, timestamp):
    """Append hash info for the last block processed, and the time at which it was done.

    Meant to be executed as part of a transaction.

    Return True on success.
    Raise an exception on invalid block number.
    Abort on db error.
    """
    insert_sql = ('INSERT INTO snapshots (block_id,consensus_hash,ops_hash,timestamp) '
                  'VALUES (?,?,?,?);')
    cls.db_query_execute(cur, insert_sql,
                         (block_id, consensus_hash, ops_hash, timestamp))
    return True
def evaluate(self):
    """Evaluate functional value of previous iteration.

    Returns a dict with data fidelity (DFid), l1 regulariser value
    (RegL1) and the combined objective (ObjFun) when the 'AccurateDFid'
    option is enabled; returns None otherwise.
    """
    if not self.opt['AccurateDFid']:
        return None
    dictionary = self.dstep.var_y()
    coefs = self.xstep.var_y()
    signal = self.xstep.S
    fidelity = 0.5 * np.linalg.norm(dictionary.dot(coefs) - signal) ** 2
    sparsity = np.sum(np.abs(coefs))
    return dict(DFid=fidelity, RegL1=sparsity,
                ObjFun=fidelity + self.xstep.lmbda * sparsity)
def introspect_operation(self, operation):
    """Introspect an entire operation, returning:

    * the method name (to expose to the user)
    * the API name (used server-side)
    * docs
    * introspected information about the parameters
    * information about the output

    :param operation: The operation to introspect
    :type operation: A <botocore.operation.Operation> object
    :returns: A dict of information
    """
    info = {}
    info['method_name'] = operation.py_name
    info['api_name'] = operation.name
    info['docs'] = self.convert_docs(operation.documentation)
    info['params'] = self.parse_params(operation.params)
    info['output'] = operation.output
    return info
def send(self, message):
    """Send a message to all subscribers of its destination.

    The frame is modified in place: ``cmd`` is set to ``message`` and a
    ``message-id`` header is added when missing.  Subscribers that raise
    while receiving the frame are disconnected afterwards.

    @param message: The message frame.
    @type message: L{stompclient.frame.Frame}
    @raise ValueError: if the frame has no ``destination`` header.
    """
    dest = message.headers.get('destination')
    if not dest:
        raise ValueError("Cannot send frame with no destination: %s" % message)
    message.cmd = 'message'
    message.headers.setdefault('message-id', str(uuid.uuid4()))
    bad_subscribers = set()
    for subscriber in self._topics[dest]:
        try:
            subscriber.send_frame(message)
        except Exception:
            # BUGFIX: narrowed from a bare ``except:`` so delivery errors no
            # longer swallow KeyboardInterrupt/SystemExit.
            self.log.exception("Error delivering message to subscriber %s; client will be disconnected." % subscriber)
            # We queue for deletion so we are not modifying the topics dict
            # while iterating over it.
            bad_subscribers.add(subscriber)
    for subscriber in bad_subscribers:
        self.disconnect(subscriber)
def _get_pores(self, sampling_points):
    """Under development.

    Builds one Pore per sampling point, centred (``com``) on that point.
    """
    return [Pore(self.system['elements'], self.system['coordinates'], com=point)
            for point in sampling_points]
def _make_image_description(self, datasets, **kwargs):
    """Generate the image description text for MITIFF.

    Produces a header block of the form::

        Satellite: <satellite name>
        Date and Time: <HH:MM dd/mm-yyyy>
        SatDir: 0
        Channels: <number of channels> In this file: <channel names in order>
        Xsize: <number of pixels x>
        Ysize: <number of pixels y>
        Map projection: Stereographic
        Proj string: <proj4 string with +x_0 and +y_0 which is the positive
                      distance from proj origo to the lower left corner of
                      the image data>
        TrueLat: 60 N
        GridRot: 0
        Xunit: 1000 m Yunit: 1000 m
        NPX: 0.00000 NPY: 0.00000
        Ax: <pixel size x in km> Ay: <pixel size y in km>
        Bx: <left corner of upper right pixel in km>
        By: <upper corner of upper right pixel in km>

    followed, per channel, by either a palette or a ``Table_calibration``
    entry.

    :param datasets: a single dataset or a list of datasets to describe
    :param kwargs: may carry ``platform_name`` and ``sensor`` overrides
    :return: the image description string
    """
    translate_platform_name = {'metop01': 'Metop-B',
                               'metop02': 'Metop-A',
                               'metop03': 'Metop-C',
                               'noaa15': 'NOAA-15',
                               'noaa16': 'NOAA-16',
                               'noaa17': 'NOAA-17',
                               'noaa18': 'NOAA-18',
                               'noaa19': 'NOAA-19'}
    first_dataset = datasets
    if isinstance(datasets, list):
        LOG.debug("Datasets is a list of dataset")
        first_dataset = datasets[0]
    if 'platform_name' in first_dataset.attrs:
        _platform_name = translate_platform_name.get(
            first_dataset.attrs['platform_name'],
            first_dataset.attrs['platform_name'])
    elif 'platform_name' in kwargs:
        _platform_name = translate_platform_name.get(
            kwargs['platform_name'], kwargs['platform_name'])
    else:
        _platform_name = None
    # BUGFIX: removed the old ``_image_description.encode('utf-8')`` call -
    # ``str`` is immutable and the result was discarded, so it was a no-op.
    _image_description = ''
    _image_description += ' Satellite: '
    if _platform_name is not None:
        _image_description += _platform_name
    _image_description += '\n'
    _image_description += ' Date and Time: '
    # Select the earliest start_time among all datasets.
    first = True
    earliest = 0
    for dataset in datasets:
        if first:
            earliest = dataset.attrs['start_time']
        else:
            if dataset.attrs['start_time'] < earliest:
                earliest = dataset.attrs['start_time']
        first = False
    LOG.debug("earliest start_time: %s", earliest)
    _image_description += earliest.strftime("%H:%M %d/%m-%Y\n")
    _image_description += ' SatDir: 0\n'
    _image_description += ' Channels: '
    if isinstance(datasets, list):
        LOG.debug("len datasets: %s", len(datasets))
        _image_description += str(len(datasets))
    elif 'bands' in datasets.sizes:
        LOG.debug("len datasets: %s", datasets.sizes['bands'])
        _image_description += str(datasets.sizes['bands'])
    elif len(datasets.sizes) == 2:
        LOG.debug("len datasets: 1")
        _image_description += '1'
    _image_description += ' In this file: '
    channels = self._make_channel_list(datasets, **kwargs)
    # BUGFIX: ``cns`` was previously only assigned inside the ``try`` block;
    # a missing 'sensor' kwarg left it unbound and its later use raised
    # NameError.  Default to an empty mapping instead.
    try:
        cns = self.translate_channel_name.get(kwargs['sensor'], {})
    except KeyError:
        cns = {}
    _image_description += self._channel_names(channels, cns, **kwargs)
    _image_description += self._add_sizes(datasets, first_dataset)
    _image_description += ' Map projection: Stereographic\n'
    _image_description += self._add_proj4_string(datasets, first_dataset)
    _image_description += ' TrueLat: 60N\n'
    _image_description += ' GridRot: 0\n'
    _image_description += ' Xunit:1000 m Yunit: 1000 m\n'
    _image_description += ' NPX: %.6f' % (0)
    _image_description += ' NPY: %.6f' % (0) + '\n'
    _image_description += self._add_pixel_sizes(datasets, first_dataset)
    _image_description += self._add_corners(datasets, first_dataset)
    if isinstance(datasets, list):
        LOG.debug("Area extent: %s", first_dataset.attrs['area'].area_extent)
    else:
        LOG.debug("Area extent: %s", datasets.attrs['area'].area_extent)
    _image_description += self._add_calibration(channels, cns, datasets, **kwargs)
    return _image_description
def decorate_cls_with_validation(cls,
                                 field_name,  # type: str
                                 *validation_func,  # type: ValidationFuncs
                                 **kwargs
                                 ):
    # type: (...) -> Type[Any]
    """Equivalent of decorating a class with ``@validate_field``, applied a posteriori.

    :param cls: the class to decorate
    :param field_name: the name of the argument to validate or _OUT_KEY for output validation
    :param validation_func: the validation function or list of validation functions to use. A validation function
        may be a callable, a tuple (callable, help_msg_str), a tuple (callable, failure_type), or a list of several
        such elements. Nested lists are supported and indicate an implicit ``and_`` (such as the main list). Tuples
        indicate an implicit ``_failure_raiser``.
        [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead of callables,
        they will be transformed to functions automatically.
    :param error_type: a subclass of ValidationError to raise in case of validation failure. By default a
        ValidationError will be raised with the provided help_msg
    :param help_msg: an optional help message to be used in the raised error in case of validation failure.
    :param none_policy: describes how None values should be handled. See ``NoneArgPolicy`` for the various
        possibilities. Default is ``NoneArgPolicy.ACCEPT_IF_OPTIONAl_ELSE_REJECT``.
    :param kw_context_args: optional contextual information to store in the exception, and that may be also used
        to format the help message
    :return: the decorated class, whose field setter (or constructor) performs input validation before executing.
    """
    # Pull the reserved keyword options out; everything left is context.
    error_type, help_msg, none_policy = pop_kwargs(kwargs, [('error_type', None),
                                                            ('help_msg', None),
                                                            ('none_policy', None)], allow_others=True)
    # the rest of keyword arguments is used as context.
    kw_context_args = kwargs
    if not isclass(cls):
        raise TypeError('decorated cls should be a class')
    if hasattr(cls, field_name):
        # ** A class field with that name exist. Is it a descriptor?
        var = cls.__dict__[field_name]
        # note: we cannot use getattr here
        if hasattr(var, '__set__') and callable(var.__set__):
            if isinstance(var, property):
                # *** OLD WAY which was losing type hints and default values (see var.__set__ signature) ***
                # properties are special beasts: their methods are method-wrappers (CPython) and can not have properties
                # so we have to create a wrapper (sic) before sending it to the main wrapping function
                # def func(inst, value):
                #     var.__set__(inst, value)
                # *** NEW WAY: more elegant, use directly the setter provided by the user ***
                func = var.fset
                nb_args = 2
            elif ismethod(var.__set__):
                # bound method: normal. Let's access to the underlying function
                func = var.__set__.__func__
                nb_args = 3
            else:
                # strange.. but lets try to continue
                func = var.__set__
                nb_args = 3
            # retrieve target function signature, check it and retrieve the 3d param
            # since signature is "def __set__(self, obj, val)"
            func_sig = signature(func)
            if len(func_sig.parameters) != nb_args:
                raise ValueError("Class field '{}' is a valid class descriptor for class '{}' but it does not implement"
                                 " __set__ with the correct number of parameters, so it is not possible to add "
                                 "validation to it. See https://docs.python.org/3.6/howto/descriptor.html"
                                 .format(field_name, cls.__name__))
            # extract the correct name
            descriptor_arg_name = list(func_sig.parameters.items())[-1][0]
            # do the same than in decorate_with_validation but with a class field validator
            # new_setter = decorate_with_validation(func, descriptor_arg_name, *validation_func, help_msg=help_msg,
            #                                       error_type=error_type, none_policy=none_policy,
            #                                       _clazz_field_name_=field_name, **kw_context_args)
            # -- create the new validator
            none_policy = none_policy or NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE
            new_validator = _create_function_validator(func, func_sig, descriptor_arg_name, *validation_func,
                                                       none_policy=none_policy, error_type=error_type,
                                                       help_msg=help_msg, validated_class=cls,
                                                       validated_class_field_name=field_name, **kw_context_args)
            # -- create the new setter with validation
            new_setter = decorate_with_validators(func, func_signature=func_sig,
                                                  **{descriptor_arg_name: new_validator})
            # replace the old one
            if isinstance(var, property):
                # properties are special beasts 2
                setattr(cls, field_name, var.setter(new_setter))
            else:
                # do not use type() for python 2 compat
                var.__class__.__set__ = new_setter
        elif (hasattr(var, '__get__') and callable(var.__get__)) or (hasattr(var, '__delete__') and callable(var.__delete__)):
            # this is a descriptor but it does not have any setter method: impossible to validate
            raise ValueError("Class field '{}' is a valid class descriptor for class '{}' but it does not implement "
                             "__set__ so it is not possible to add validation to it. See "
                             "https://docs.python.org/3.6/howto/descriptor.html".format(field_name, cls.__name__))
        else:
            # this is not a descriptor: unsupported
            raise ValueError("Class field '{}.{}' is not a valid class descriptor, see "
                             "https://docs.python.org/3.6/howto/descriptor.html".format(cls.__name__, field_name))
    else:
        # ** No class field with that name exist
        # ? check for attrs ? > no specific need anymore, this is the same than annotating the constructor
        # if hasattr(cls, '__attrs_attrs__'): this was a proof of attrs-defined class
        # try to annotate the generated constructor
        try:
            init_func = cls.__init__
            if sys.version_info < (3, 0):
                try:
                    # python 2 - we have to access the inner `im_func`
                    init_func = cls.__init__.im_func
                except AttributeError:
                    pass
            cls.__init__ = decorate_with_validation(init_func, field_name, *validation_func, help_msg=help_msg,
                                                    _constructor_of_cls_=cls, error_type=error_type,
                                                    none_policy=none_policy, **kw_context_args)
        except InvalidNameError:
            # the field was not found
            # TODO should we also check if a __setattr__ is defined?
            # (for __setattr__ see https://stackoverflow.com/questions/15750522/class-properties-and-setattr/15751159)
            # finally raise an error
            raise ValueError("@validate_field definition exception: field '{}' can not be found in class '{}', and it "
                             "is also not an input argument of the __init__ method.".format(field_name, cls.__name__))
    return cls
def _pre_commit ( self , transaction , * args , ** kwargs ) :
"""Begin transaction and call the wrapped callable .
If the callable raises an exception , the transaction will be rolled
back . If not , the transaction will be " ready " for ` ` Commit ` ` ( i . e .
it will have staged writes ) .
Args :
transaction ( ~ . firestore _ v1beta1 . transaction . Transaction ) : A
transaction to execute the callable within .
args ( Tuple [ Any , . . . ] ) : The extra positional arguments to pass
along to the wrapped callable .
kwargs ( Dict [ str , Any ] ) : The extra keyword arguments to pass
along to the wrapped callable .
Returns :
Any : result of the wrapped callable .
Raises :
Exception : Any failure caused by ` ` to _ wrap ` ` .""" | # Force the ` ` transaction ` ` to be not " in progress " .
transaction . _clean_up ( )
transaction . _begin ( retry_id = self . retry_id )
# Update the stored transaction IDs .
self . current_id = transaction . _id
if self . retry_id is None :
self . retry_id = self . current_id
try :
return self . to_wrap ( transaction , * args , ** kwargs )
except : # noqa
# NOTE : If ` ` rollback ` ` fails this will lose the information
# from the original failure .
transaction . _rollback ( )
raise |
def s2time(secs, show_secs=True, show_fracs=True):
    """Convert a number of seconds to an ``HH:MM[:SS[.cc]]`` clock string.

    :param secs: seconds (anything convertible to float)
    :param show_secs: include the seconds field
    :param show_fracs: include the centisecond fraction
    :return: formatted time string (hours wrap modulo 24), or
        ``"--:--:--.--"`` when ``secs`` is not numeric
    """
    try:
        secs = float(secs)
    except (TypeError, ValueError):
        # Narrowed from a bare ``except:`` - only conversion failures
        # should yield the placeholder.
        return "--:--:--.--"
    wholesecs = int(secs)
    centisecs = int((secs - wholesecs) * 100)
    hh = int(wholesecs / 3600)
    hd = int(hh % 24)  # display hours wrap at 24
    mm = int((wholesecs / 60) - (hh * 60))
    ss = int(wholesecs - (hh * 3600) - (mm * 60))
    r = "{:02d}:{:02d}".format(hd, mm)
    if show_secs:
        r += ":{:02d}".format(ss)
    if show_fracs:
        r += ".{:02d}".format(centisecs)
    return r
def create_route(self, item, routes):
    """Register ``item`` in the routing map under every given route.

    :param item: the handler/object to store
    :param routes: iterable of route keys
    :return: the item, unchanged (allows decorator-style use)
    """
    for key in routes:
        bucket = self._routes.setdefault(key, set())
        bucket.add(item)
    return item
def comment_magic(source, language='python', global_escape_flag=True):
    """Escape Jupyter magics with '#'.

    Mutates ``source`` in place: each magic line (and any continuation of
    a magic line, for Python) outside of quoted strings is prefixed with
    the language's comment marker.  Returns the same list.
    """
    parser = StringParser(language)
    continuation_is_magic = False
    for idx, line in enumerate(source):
        if not parser.is_quoted() and (continuation_is_magic or
                                       is_magic(line, language, global_escape_flag)):
            source[idx] = _COMMENT[language] + ' ' + line
    # A trailing backslash carries the magic over to the next line.
        continuation_is_magic = language == 'python' and _LINE_CONTINUATION_RE.match(line)
        parser.read_line(line)
    return source
def GenerateDSP(dspfile, source, env):
    """Generate a project file matching the MSVS version in use.

    Falls back to the V6 format when no (or an old) MSVS_VERSION is set.
    """
    version_num = 6.0
    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
    if version_num >= 10.0:
        generator = _GenerateV10DSP(dspfile, source, env)
    elif version_num >= 7.0:
        generator = _GenerateV7DSP(dspfile, source, env)
    else:
        generator = _GenerateV6DSP(dspfile, source, env)
    generator.Build()
def get_all_units(self, params=None):
    """Get all units by iterating over every page.

    If the rate limit is exceeded an exception is raised and nothing is
    returned.

    :param params: search params
    :return: list
    """
    search = params if params else {}
    return self._iterate_through_pages(self.get_units_per_page,
                                       resource=UNITS, params=search)
def validate_files(pelican):
    """Validate every generated HTML file under the output path.

    :param pelican: pelican object
    """
    output_root = pelican.settings['OUTPUT_PATH']
    for dirpath, _, filenames in os.walk(output_root):
        for name in (n for n in filenames if should_validate(n)):
            validate(os.path.join(dirpath, name))
def get_jsapi_ticket(self):
    """Fetch the WeChat JS-SDK ticket.

    The ticket is cached (together with its expiry time) through the
    session object and only refreshed when missing or expired.

    :return: ticket
    """
    corp_id = self._client.corp_id
    ticket_key = '{}_jsapi_ticket'.format(corp_id)
    expires_at_key = '{}_jsapi_ticket_expires_at'.format(corp_id)
    ticket = self.session.get(ticket_key)
    expires_at = self.session.get(expires_at_key, 0)
    if not ticket or expires_at < int(time.time()):
        fresh = self.get_ticket()
        ticket = fresh['ticket']
        expires_at = int(time.time()) + int(fresh['expires_in'])
        self.session.set(ticket_key, ticket)
        self.session.set(expires_at_key, expires_at)
    return ticket
def _SetOptions ( self , options , options_class_name ) :
"""Sets the descriptor ' s options
This function is used in generated proto2 files to update descriptor
options . It must not be used outside proto2.""" | self . _options = options
self . _options_class_name = options_class_name
# Does this descriptor have non - default options ?
self . has_options = options is not None |
def _tensor_proto_to_health_pill ( self , tensor_event , node_name , device , output_slot ) :
"""Converts an event _ accumulator . TensorEvent to a HealthPillEvent .
Args :
tensor _ event : The event _ accumulator . TensorEvent to convert .
node _ name : The name of the node ( without the output slot ) .
device : The device .
output _ slot : The integer output slot this health pill is relevant to .
Returns :
A HealthPillEvent .""" | return self . _process_health_pill_value ( wall_time = tensor_event . wall_time , step = tensor_event . step , device_name = device , output_slot = output_slot , node_name = node_name , tensor_proto = tensor_event . tensor_proto ) |
def elevate(self):
    r"""Return a degree-elevated version of the current surface.

    Converts the degree-:math:`d` control nodes
    :math:`\left\{v_{i,j,k}\right\}_{i+j+k=d}` into the equivalent
    degree-:math:`(d + 1)` nodes
    :math:`\left\{w_{i,j,k}\right\}_{i+j+k=d+1}` via

    .. math::

        (d + 1) \cdot w_{i,j,k} =
            i \cdot v_{i-1,j,k} + j \cdot v_{i,j-1,k} + k \cdot v_{i,j,k-1}

    where terms with a negative index are taken to be zero.

    Returns:
        Surface: The degree-elevated surface.
    """
    _, num_nodes = self._nodes.shape
    # (d + 1)(d + 2)/2 --> (d + 2)(d + 3)/2
    elevated_count = num_nodes + self._degree + 2
    elevated = np.zeros((self._dimension, elevated_count), order="F")
    # Each current node with triple (i, j, k) contributes to the three
    # "child" triples (i + 1, j, k), (i, j + 1, k) and (i, j, k + 1).
    # The same index tracking is done in :func:`.de_casteljau_one_round`.
    src = 0
    child_i = 0                 # column of (i + 1, j, k)
    child_j = 1                 # column of (i, j + 1, k)
    child_k = self._degree + 2  # column of (i, j, k + 1)
    for k in six.moves.xrange(self._degree + 1):
        for j in six.moves.xrange(self._degree + 1 - k):
            i = self._degree - j - k
            elevated[:, child_i] += (i + 1) * self._nodes[:, src]
            elevated[:, child_j] += (j + 1) * self._nodes[:, src]
            elevated[:, child_k] += (k + 1) * self._nodes[:, src]
            src += 1
            child_i += 1
            child_j += 1
            child_k += 1
        # Step over the row boundary in the elevated triangle.
        child_i += 1
        child_j += 1
    # Divide once at the end, to (attempt to) avoid round-off.
    return Surface(elevated / (self._degree + 1.0), self._degree + 1, _copy=False)
def clone_repo(self):
    """Clone a repository containing the dotfiles source.

    Replaces ``self.source`` with the local checkout path.

    Raises:
        NotImplementedError: if the repo type is not git.
    """
    checkout_path = tempfile.mkdtemp()
    if not self.args.git:
        raise NotImplementedError('Unknown repo type')
    self.log.debug('Cloning git source repository from %s to %s',
                   self.source, checkout_path)
    self.sh('git clone', self.source, checkout_path)
    self.source = checkout_path
def home(request, hproPk):
    """Route the request to runURI if defined, otherwise go to plugIt."""
    if settings.PIAPI_STANDALONE:
        return main(request, '', hproPk)
    (plugIt, baseURI, hproject) = getPlugItObject(hproPk)
    if hproject.runURI:
        return HttpResponseRedirect(hproject.runURI)
    # No runURI: honour a custom url key when one is configured.
    custom_key = getattr(hproject, 'plugItCustomUrlKey', None)
    if custom_key:
        return HttpResponseRedirect(reverse('plugIt.views.main', args=(custom_key, '')))
    return main(request, '', hproPk)
def delete(self, loc):
    """Make a new index with the passed location deleted.

    Returns
    -------
    new_index : MultiIndex
    """
    trimmed = [np.delete(level_codes, loc) for level_codes in self.codes]
    return MultiIndex(levels=self.levels, codes=trimmed,
                      names=self.names, verify_integrity=False)
def get_verbose_field_name(instance, field_name):
    """Return the verbose_name for a model field, or the name itself when unknown."""
    known = {field.name for field in instance._meta.fields}
    if field_name not in known:
        return field_name
    return instance._meta.get_field(field_name).verbose_name
def object_to_dict(cls, obj):
    """Recursively convert an object graph into plain dictionaries.

    Primitives are returned as-is; a top-level list is wrapped under the
    key ``"list"``; nested lists stay plain lists; ``None`` maps to an
    empty dict.
    """
    if obj is None:
        return {}
    if type(obj) == list:
        return {"list": [cls.object_to_dict(element) for element in obj]}
    if cls.is_primitive(obj):
        return obj
    converted = {}
    for key, attr in obj.__dict__.items():  # is an object
        if type(attr) == list:
            converted[key] = [cls.object_to_dict(element) for element in attr]
        elif cls.is_primitive(attr):
            converted[key] = attr
        else:
            converted[key] = cls.object_to_dict(attr)
    return converted
def CoefficientOfNetworkComplexity_metric(bpmn_graph):
    """Return the Coefficient of Network Complexity metric.

    ("Ratio of the total number of arcs in a process model to its total
    number of nodes.") for the BPMNDiagramGraph instance.

    :param bpmn_graph: an instance of BpmnDiagramGraph representing BPMN model.
    """
    arc_count = len(bpmn_graph.get_flows())
    node_count = len(bpmn_graph.get_nodes())
    return float(arc_count) / float(node_count)
def get_symbol_id_map(self):
    """A convenience method to create a mapping between the HGNC
    symbols and their identifiers.

    Reads the raw tab-separated HGNC genes file and maps each (stripped)
    gene symbol to its HGNC id.

    :return: dict mapping symbol -> hgnc_id
    """
    symbol_id_map = {}
    f = '/'.join((self.rawdir, self.files['genes']['file']))
    with open(f, 'r', encoding="utf8") as csvfile:
        filereader = csv.reader(csvfile, delimiter='\t', quotechar='\"')
        for row in filereader:
            # Full-width unpack of the HGNC row; a ValueError here means
            # the upstream file format (column count) changed.
            (hgnc_id, symbol, name, locus_group, locus_type, status, location, location_sortable, alias_symbol, alias_name, prev_symbol, prev_name, gene_family, gene_family_id, date_approved_reserved, date_symbol_changed, date_name_changed, date_modified, entrez_id, ensembl_gene_id, vega_id, ucsc_id, ena, refseq_accession, ccds_id, uniprot_ids, pubmed_id, mgd_id, rgd_id, lsdb, cosmic, omim_id, mirbase, homeodb, snornabase, bioparadigms_slc, orphanet, pseudogene_org, horde_id, merops, imgt, iuphar, kznf_gene_catalog, mamit_trnadb, cd, lncrnadb, enzyme_id, intermediate_filament_db) = row
            symbol_id_map[symbol.strip()] = hgnc_id
    return symbol_id_map
def PushAttributeContainer(self, serialized_data):
    """Pushes a serialized attribute container onto the list.

    Args:
      serialized_data (bytes): serialized attribute container data.
    """
    # Track cumulative payload size and a monotonically increasing sequence.
    self.data_size += len(serialized_data)
    self.next_sequence_number += 1
    self._list.append(serialized_data)
def get_preview_kwargs(self, **kwargs):
    """Build the url keyword arguments for the ``preview_view`` callable.

    When ``pass_through_kwarg`` is set, the value of ``pass_through_attr``
    is looked up on the previewed object.  E.g. previewing Obj<id=2> with::

        self.pass_through_kwarg = 'object_id'
        self.pass_through_attr = 'pk'

    returns ``{'object_id': 2}``.
    """
    kwarg_name = self.pass_through_kwarg
    if not kwarg_name:
        return {}
    target = self.get_object()
    return {kwarg_name: getattr(target, self.pass_through_attr)}
def add(env, securitygroup_id, network_component, server, interface):
    """Attach an interface to a security group."""
    _validate_args(network_component, server, interface)
    manager = SoftLayer.NetworkManager(env.client)
    component_id = _get_component_id(env, network_component, server, interface)
    attached = manager.attach_securitygroup_component(securitygroup_id, component_id)
    if not attached:
        raise exceptions.CLIAbort("Could not attach network component")
    table = formatting.Table(REQUEST_COLUMNS)
    table.add_row([attached['requestId']])
    env.fout(table)
def tsToDf(tso):
    """Create Pandas DataFrame from TimeSeries object.

    Use: must first extractTs to get a time series, then pick one item
    from the time series and pass it through.

    :param dict tso: Time series entry
    :return dict dfs: Pandas dataframes (empty on failure)
    """
    frames = {}
    try:
        frames = ts_to_df(tso)
    except Exception as e:
        print("Error: Unable to create data frame")
        logger_start.warn("ts_to_df: tso malformed: {}".format(e))
    return frames
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.