signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _duplicateLayer ( self , layerName , newLayerName ) :
"""This is the environment implementation of : meth : ` BaseFont . duplicateLayer ` .
* * layerName * * will be a : ref : ` type - string ` representing a valid layer name .
The value will have been normalized with : func : ` normalizers . normalizeLayerName `
and * * layerName * * will be a layer that exists in the font . * * newLayerName * *
will be a : ref : ` type - string ` representing a valid layer name . The value will
have been normalized with : func : ` normalizers . normalizeLayerName ` and
* * newLayerName * * will have been tested to make sure that no layer with
the same name exists in the font . This must return an instance of a
: class : ` BaseLayer ` subclass .
Subclasses may override this method ."""
|
newLayer = self . getLayer ( layerName ) . copy ( )
return self . insertLayer ( newLayer , newLayerName )
|
def parse_yaml(self, y):
    '''Parse a YAML specification of a location into this object.

    Reads x, y, height and width as integers plus a direction parsed
    via dir.from_string; returns self for chaining.
    '''
    for key in ('x', 'y', 'height', 'width'):
        setattr(self, key, int(y[key]))
    # NOTE(review): `dir` is a project helper that shadows the builtin.
    self.direction = dir.from_string(y['direction'])
    return self
|
def getPrecision(self, result=None):
    """Return the precision for the Analysis.

    Resolution order:
    - If Manual Uncertainty or "Calculate Precision from Uncertainty" is
      set, derive the precision from the uncertainty of *result*.
    - If the uncertainty is unknown (None), or is 0 with no result given,
      fall back to the Precision field of the Analysis Service.
    - If the uncertainty is exactly 0 and a result is given, the result is
      an exact value (like counting spots on dice, or 2.54 cm per inch by
      definition) and its own number of decimals is returned.
    - Otherwise, the precision is inferred from the significant digits of
      the uncertainty.

    Further information at AbstractBaseAnalysis.getPrecision().

    :param result: optional raw result used to derive the number of
        decimals when the uncertainty is exactly zero.
    :return: precision as a number of decimal places.
    """
    allow_manual = self.getAllowManualUncertainty()
    precision_unc = self.getPrecisionFromUncertainty()
    if allow_manual or precision_unc:
        uncertainty = self.getUncertainty(result)
        if uncertainty is None:
            return self.getField('Precision').get(self)
        if uncertainty == 0 and result is None:
            return self.getField('Precision').get(self)
        if uncertainty == 0:
            # Exact value: use the decimals present in the result itself.
            # Bug fix: str(result)[::-1].find('.') returns -1 when the
            # result has no decimal point; report 0 decimals in that case
            # instead of a negative precision.
            strres = str(result)
            numdecimals = strres[::-1].find('.')
            return numdecimals if numdecimals >= 0 else 0
        return get_significant_digits(uncertainty)
    return self.getField('Precision').get(self)
|
def get_analysis(self, current_class):
    """Return the :class:`~androguard.core.analysis.analysis.Analysis`
    object which contains `current_class`.

    :param current_class: The class to search for
    :type current_class: androguard.core.bytecodes.dvm.ClassDefItem
    :rtype: androguard.core.analysis.analysis.Analysis, or None when no
        analyzed VM contains the class
    """
    class_name = current_class.get_name()
    for dx in self.analyzed_vms.values():
        if dx.is_class_present(class_name):
            return dx
    return None
|
def do_copy_file(self, args):
    """Decide whether a file needs copying and delegate to collectstatic.

    *args* is the (path, prefixed_path, source_storage) tuple handed over
    by Django's collectstatic. Returns False when the copy is skipped,
    otherwise whatever the parent copy_file returns. Errors from the
    should-copy probe are swallowed (unless settings.debug) so the default
    collectstatic behaviour can take over.
    """
    path, prefixed_path, source_storage = args
    reset_connection(self.storage)
    if self.collectfast_enabled and not self.dry_run:
        try:
            copy_needed = should_copy_file(self.storage, path, prefixed_path, source_storage)
        except Exception as e:
            if settings.debug:
                raise
            # Ignore errors and let default collectstatic handle copy
            self.stdout.write(smart_str("Ignored error in Collectfast:\n%s\n--> Continuing using " "default collectstatic." % e))
        else:
            if not copy_needed:
                return False
        self.num_copied_files += 1
    return super(Command, self).copy_file(path, prefixed_path, source_storage)
|
def add_wf(self, wf_obj):
    """Add a token to the text layer, creating the layer on first use.

    @type wf_obj: L{Cwf}
    @param wf_obj: the token object
    """
    if self.text_layer is None:
        # Lazily create the text layer and hook it into the XML root.
        self.text_layer = Ctext(type=self.type)
        self.root.append(self.text_layer.get_node())
    self.text_layer.add_wf(wf_obj)
|
def assign_from_subscribed(self, assignments):
    """Dynamically set the assignment from coordinator-provided partitions.

    Called by the coordinator during rebalancing; this differs from
    assign_from_user(), which installs a user-supplied TopicPartition list
    directly.

    Arguments:
        assignments (list of TopicPartition): partitions to assign to this
            consumer instance.

    Raises:
        IllegalStateError: if partitions were not auto-assigned.
        ValueError: if an assigned partition belongs to a topic this
            consumer is not subscribed to.
    """
    if not self.partitions_auto_assigned():
        raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE)
    for tp in assignments:
        if tp.topic not in self.subscription:
            raise ValueError("Assigned partition %s for non-subscribed topic." % (tp,))
    # After a rebalance the assignment state is always rebuilt from scratch.
    self.assignment.clear()
    for tp in assignments:
        self._add_assigned_partition(tp)
    self.needs_fetch_committed_offsets = True
    log.info("Updated partition assignment: %s", assignments)
|
def _assert_path_is_dir ( self , dir_path ) :
"""Args :
dir _ path :"""
|
if not os . path . isdir ( dir_path ) :
raise django . core . management . base . CommandError ( 'Invalid dir path. path="{}"' . format ( dir_path ) )
|
def calmar_ratio(returns, period=DAILY, annualization=None):
    """Determine the Calmar ratio, or drawdown ratio, of a strategy.

    Parameters
    ----------
    returns : pd.Series or np.ndarray
        Daily noncumulative returns of the strategy.
        See full explanation in :func:`~empyrical.stats.cum_returns`.
    period : str, optional
        Periodicity of the 'returns' data for annualizing; ignored when
        `annualization` is given. Defaults: 'monthly': 12, 'weekly': 52,
        'daily': 252.
    annualization : int, optional
        Explicit annual frequency of `returns`, overriding `period`.

    Returns
    -------
    calmar_ratio : float
        Calmar ratio (drawdown ratio), or np.nan when there is no drawdown
        or the ratio is infinite.

    Note
    ----
    See https://en.wikipedia.org/wiki/Calmar_ratio for more details.
    """
    max_dd = max_drawdown(returns=returns)
    if max_dd >= 0:
        # Without a drawdown the ratio is undefined.
        return np.nan
    ratio = annual_return(returns=returns, period=period, annualization=annualization) / abs(max_dd)
    return np.nan if np.isinf(ratio) else ratio
|
def db_user_exists(username):
    """Return True if the PostgreSQL role *username* already exists."""
    qry = u"""SELECT COUNT(*) FROM pg_roles where rolname = \'{username}\';"""
    captured = StringIO()
    # NOTE(review): `excute_query` matches the (misspelled) helper name used
    # by this project.
    excute_query(qry.format(username=username), flags="-Aqt", use_sudo=True, stdout=captured)
    # FIXME: is there a way to get fabric to not clutter the output
    # with "[127.0.0.1] out: " on each line?
    out_lines = captured.getvalue().splitlines()
    return out_lines and out_lines[0].endswith('out: 1')
|
def binary_size(self):
    '''Return the number of bytes needed to store this group and its
    parameters.'''
    name_bytes = len(self.name.encode('utf-8'))
    desc_bytes = len(self.desc.encode('utf-8'))
    param_bytes = sum(p.binary_size() for p in self.params.values())
    # group_id byte + name-size byte + name + 2-byte next-offset marker +
    # desc-size byte + desc + all parameter blocks
    return 1 + (1 + name_bytes) + 2 + (1 + desc_bytes) + param_bytes
|
def parse_seq(tokens, options):
    """seq ::= ( atom [ '...' ] )* ;"""
    parsed = []
    terminators = (None, ']', ')', '|')
    while tokens.current() not in terminators:
        atom = parse_atom(tokens, options)
        if tokens.current() == '...':
            # Ellipsis wraps the preceding atom in a repetition node.
            atom = [OneOrMore(*atom)]
            tokens.move()
        parsed.extend(atom)
    return parsed
|
def fader(self, value: int):
    """Move the fader to a new position in the range 0 to 1023."""
    # Out-of-range values reset the fader to 0.
    position = int(value) if 0 < value < 1024 else 0
    self._fader = position
    # Transmit the 14-bit position as coarse (CC 0) and fine (CC 32) parts.
    self.outport.send(mido.Message('control_change', control=0, value=position >> 7))
    self.outport.send(mido.Message('control_change', control=32, value=position & 0x7F))
|
def _create_dns_list ( self , dns ) :
""": param dns :
: return :"""
|
if not dns :
return None
dns_list = [ ]
if isinstance ( dns , six . string_types ) :
if is_valid_ip ( dns ) :
dns_list . append ( dns )
else :
raise ValueError ( "dns is required to be a valid ip adress. {0} was passed." . format ( dns ) )
elif isinstance ( dns , list ) :
for dns_entry in dns :
if is_valid_ip ( dns_entry ) :
dns_list . append ( dns_entry )
else :
raise ValueError ( "dns is required to be a valid ip adress. {0} was passed." . format ( dns ) )
else :
raise ValueError ( "dns and dns search must be a list or string. {0} was passed." . format ( dns ) )
return dns_list
|
def workdays(first_day=None):
    """Return the five workday names starting at *first_day*.

    Arguments
    ---------
    first_day : str, default None
        The first day of the five-day work week; 'Monday' when not given.

    Returns
    -------
    list
        A list of workday names.
    """
    start = 'Monday' if first_day is None else first_day
    start_ix = _lower_weekdays().index(start.lower())
    return _double_weekdays()[start_ix:start_ix + 5]
|
def process_content(self, content, filename=None, content_type=None):
    """Standard implementation of :meth:`.DepotFileInfo.process_content`.

    Stores *content* on the default depot and records the standard set of
    attributes (file_id, path, filename, content_type, uploaded_at,
    _public_url) on this file info. Subclasses should call this method to
    ensure the standard attributes are provided.
    """
    file_path, file_id = self.store_content(content, filename, content_type)
    # file_id/path must be recorded before self.file is resolved.
    self['file_id'] = file_id
    self['path'] = file_path
    stored = self.file
    self['filename'] = stored.filename
    self['content_type'] = stored.content_type
    self['uploaded_at'] = stored.last_modified.strftime('%Y-%m-%d %H:%M:%S')
    self['_public_url'] = stored.public_url
|
def print_model(self, include_unsigned_edges=False):
    """Return a SIF string of the assembled model.

    Parameters
    ----------
    include_unsigned_edges : bool
        If True, includes edges with an unknown activating/inactivating
        relationship (e.g., most PTMs) with relation '0'. Default is False.
    """
    lines = []
    for source, target, data in self.graph.edges(data=True):
        polarity = data.get('polarity')
        if polarity == 'positive':
            rel = '1'
        elif polarity == 'negative':
            rel = '-1'
        elif include_unsigned_edges:
            rel = '0'
        else:
            # Unsigned edge and not requested: drop it.
            continue
        lines.append('%s %s %s\n' % (source, rel, target))
    return ''.join(lines)
|
def _compute_callables_count ( self , iters : Dict [ str , Any ] ) :
"""Iterate over all issues and count the number of times each callable
is seen ."""
|
count = dict . fromkeys ( [ issue [ "callable" ] for issue in iters [ "issues" ] ] , 0 )
for issue in iters [ "issues" ] :
count [ issue [ "callable" ] ] += 1
return count
|
def pose_to_list(pose):
    """Convert a Pose or PoseStamped into [[position], [quaternion]] lists.

    :param pose: geometry_msgs.msg.PoseStamped or geometry_msgs.msg.Pose
    :return: the equivalent [[x, y, z], [qx, qy, qz, qw]] lists
    :raises Exception: when *pose* is of any other type.
    """
    if type(pose) == geometry_msgs.msg.PoseStamped:
        return [[pose.pose.position.x, pose.pose.position.y, pose.pose.position.z], [pose.pose.orientation.x, pose.pose.orientation.y, pose.pose.orientation.z, pose.pose.orientation.w]]
    elif type(pose) == geometry_msgs.msg.Pose:
        return [[pose.position.x, pose.position.y, pose.position.z], [pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w]]
    else:
        # Bug fix: the message used to be raised as an unformatted
        # ('%s', arg) tuple; format the type into the message instead.
        raise Exception("pose_to_list: parameter of type %s unexpected" % str(type(pose)))
|
def canonicalize(message):
    """Convert an email Message to its canonical string representation.

    :param message: email.Message to be converted
    :return: the standard representation of the email message in bytes
    """
    if not message.is_multipart() and message.get('Content-Transfer-Encoding') == 'binary':
        # Binary payloads must not have their line endings normalized:
        # serialize the headers by hand and append the raw payload.
        header_block = ''.join('{}: {}\r\n'.format(k, v) for k, v in message.items())
        return (header_block + '\r\n').encode('utf-8') + message.get_payload(decode=True)
    # Normalize all line endings to CRLF.
    flattened = mime_to_bytes(message, 0)
    return flattened.replace(b'\r\n', b'\n').replace(b'\r', b'\n').replace(b'\n', b'\r\n')
|
def read_requirements_file(path):
    """Read a requirements.txt file, handling PyPI index URLs.

    Lines of the form ``--index-url <url>`` set the index used for all
    following requirement lines; each requirement is also reported as a
    lowercased package URL under the active index.

    :param path: (str) path to requirements.txt file
    :return: (tuple of lists) ``(requires, pypi_urls)``: the raw
        requirement lines and the resolved per-package index URLs.
    """
    last_pypi_url = None
    requires = []
    pypi_urls = []
    with open(path) as f:
        for line in f.readlines():
            # Bug fix: `if not line` never fired because readlines() keeps
            # the trailing newline, so blank lines leaked into both result
            # lists; skip whitespace-only lines explicitly.
            if not line.strip():
                continue
            if '--' in line:
                match = re.match(r'--index-url\s+([\w\d:/.-]+)\s', line)
                if match:
                    last_pypi_url = match.group(1)
                    if not last_pypi_url.endswith("/"):
                        last_pypi_url += "/"
            else:
                if last_pypi_url:
                    pypi_urls.append(last_pypi_url + line.strip().lower())
                requires.append(line)
    return requires, pypi_urls
|
def file_rights(filepath, mode=None, uid=None, gid=None):
    '''Change the mode and/or ownership of *filepath*.

    :param filepath: path of the file to modify.
    :param mode: numeric mode for chmod; ignored when None.
    :param uid: numeric owner id for chown; ignored when None.
    :param gid: numeric group id; only used together with *uid*. When not
        given, the group is left unchanged (os.fchown's -1 convention).
    '''
    file_handle = os.open(filepath, os.O_RDONLY)
    try:
        if mode is not None:
            os.fchmod(file_handle, mode)
        # Bug fix: `if uid:` skipped uid 0 (root); compare against None.
        if uid is not None:
            if gid is None:
                # Bug fix: the group used to default to gid 0 (root group);
                # -1 tells fchown to leave the group untouched.
                gid = -1
            os.fchown(file_handle, uid, gid)
    finally:
        # Release the descriptor even when chmod/chown fails.
        os.close(file_handle)
|
def linestrings_intersect(line1, line2):
    """Check whether two GeoJSON LineStrings intersect each other.

    reference: http://www.kevlindev.com/gui/math/intersection/Intersection.js

    Keyword arguments:
    line1 -- first line geojson object
    line2 -- second line geojson object

    Returns a list of GeoJSON Point objects, one per pairwise segment
    intersection (empty when the lines do not intersect).
    """
    intersects = []
    coords1 = line1['coordinates']
    coords2 = line2['coordinates']
    for i in range(len(coords1) - 1):
        # NOTE: coordinate pairs are read as (y, x) -- index 1 is treated
        # as x -- matching the original implementation.
        a1_x, a1_y = coords1[i][1], coords1[i][0]
        a2_x, a2_y = coords1[i + 1][1], coords1[i + 1][0]
        for j in range(len(coords2) - 1):
            b1_x, b1_y = coords2[j][1], coords2[j][0]
            b2_x, b2_y = coords2[j + 1][1], coords2[j + 1][0]
            denom = (b2_y - b1_y) * (a2_x - a1_x) - (b2_x - b1_x) * (a2_y - a1_y)
            if denom == 0:
                # Parallel or degenerate segments never intersect here.
                continue
            u_a = ((b2_x - b1_x) * (a1_y - b1_y) - (b2_y - b1_y) * (a1_x - b1_x)) / denom
            u_b = ((a2_x - a1_x) * (a1_y - b1_y) - (a2_y - a1_y) * (a1_x - b1_x)) / denom
            if 0 <= u_a <= 1 and 0 <= u_b <= 1:
                intersects.append({'type': 'Point', 'coordinates': [a1_x + u_a * (a2_x - a1_x), a1_y + u_a * (a2_y - a1_y)]})
    return intersects
|
def save(self):
    """Save the row back to the spreadsheet.

    Raises ReadOnlyException on read-only sheets; no-ops when nothing has
    changed. Returns the updated entry on success.
    """
    if self._sheet.readonly:
        raise ReadOnlyException
    if not self._changed:
        # Nothing to save.
        return
    gd_client = self._sheet.client
    assert gd_client is not None
    try:
        entry = gd_client.UpdateRow(self._entry, self._data)
    except RequestError as e:
        error_data = e.args[0]
        # Every status is currently re-raised; 403 (forbidden) and
        # 409 (conflict) are kept as an explicit branch for future
        # differentiated handling.
        if error_data.status in (403, 409):
            raise
        raise
    self._entry = entry
    # Reset the dirty flag now that the server holds the new data.
    self._changed = False
    return entry
|
def load_data_and_build(self, filename, delimiter=","):
    """Convenience function for directly working with a data file.

    Reads a delimited file with a header row into an array and builds the
    model from it: all columns but the last become the input matrix X and
    the last column becomes the response Y.

    @ In, filename, string representing the data file
    @ In, delimiter, string column separator (default ",")
    """
    raw = np.genfromtxt(filename, dtype=float, delimiter=delimiter, names=True)
    # Flatten the structured array into a plain 2-D float table.
    table = raw.view(np.float64).reshape(raw.shape + (-1,))
    self.build(X=table[:, 0:-1], Y=table[:, -1])
|
def returner(ret):
    '''Send a PushOver message with the return data.'''
    _options = _get_options(ret)
    token = _options.get('token')
    user = _options.get('user')
    # Token and user key are mandatory for the Pushover API.
    if not token:
        raise SaltInvocationError('Pushover token is unavailable.')
    if not user:
        raise SaltInvocationError('Pushover user key is unavailable.')
    priority = _options.get('priority')
    expire = _options.get('expire')
    retry = _options.get('retry')
    # Emergency priority (2) requires retry/expire per the Pushover API.
    if priority and priority == 2 and not expire and not retry:
        raise SaltInvocationError('Priority 2 requires pushover.expire and pushover.retry options.')
    message = ('id: {0}\r\n' 'function: {1}\r\n' 'function args: {2}\r\n' 'jid: {3}\r\n' 'return: {4}\r\n').format(ret.get('id'), ret.get('fun'), ret.get('fun_args'), ret.get('jid'), pprint.pformat(ret.get('return')))
    result = _post_message(user=user, device=_options.get('device'), message=message, title=_options.get('title'), priority=priority, expire=expire, retry=retry, sound=_options.get('sound'), token=token)
    log.debug('pushover result %s', result)
    if not result['res']:
        log.info('Error: %s', result['message'])
    return
|
def launch():
    """Ensure that python is running from the Lore virtualenv past this point.

    If already launched, verify the version, chdir to the project root and
    return. Otherwise make sure the virtualenv exists (installing it when
    missing) and re-exec the process inside it via reboot().
    """
    if launched():
        check_version()
        os.chdir(ROOT)
        return
    if not os.path.exists(BIN_LORE):
        missing = ' %s virtualenv is missing.' % APP
        if '--launched' in sys.argv:
            # Already re-executed once and the venv is still missing:
            # abort instead of looping forever.
            sys.exit(ansi.error() + missing + ' Please check for errors during:\n $ lore install\n')
        else:
            print(ansi.warning() + missing)
            import lore.__main__
            lore.__main__.install(None, None)
    # Re-exec this process inside the virtualenv's interpreter.
    reboot('--env-launched')
|
def decrease_posts_count_after_post_unaproval(sender, instance, **kwargs):
    """Decrease the author's posts count after a forum post unapproval.

    Signal receiver: compares the stored copy of the post with the incoming
    instance and decrements the poster's profile counter on an
    approved -> unapproved transition.
    """
    if not instance.pk:
        # Posts being created have no previous state to compare against.
        return
    profile, dummy = ForumProfile.objects.get_or_create(user=instance.poster)
    try:
        old_instance = instance.__class__._default_manager.get(pk=instance.pk)
    except ObjectDoesNotExist:  # pragma: no cover
        # This should never happen (except with django loaddata command)
        return
    unapproved = old_instance and old_instance.approved is True and instance.approved is False
    if unapproved:
        profile.posts_count = F('posts_count') - 1
        profile.save()
|
def module_broadcast(m, broadcast_fn, *args, **kwargs):
    """Call the given function in all leaf submodules of *m* with the given
    parameters.

    NOTE(review): args/kwargs are forwarded to module_apply_broadcast as
    plain positional values (not unpacked), mirroring the original call.
    """
    def _broadcast_leaf(submodule):
        return module_apply_broadcast(submodule, broadcast_fn, args, kwargs)
    apply_leaf(m, _broadcast_leaf)
|
def build_or_reuse_placeholder(tensor_spec):
    """Build a tf.placeholder from the metadata in the given tensor spec,
    or return an existing one.

    Args:
        tensor_spec (tf.TensorSpec): name, dtype and shape of the tensor.

    Returns:
        tf.Tensor: a placeholder compatible with the spec.
    """
    graph = tfv1.get_default_graph()
    name = tensor_spec.name
    try:
        existing = graph.get_tensor_by_name(name + ':0')
    except KeyError:
        # Not present yet: create it outside any name scope so the tensor
        # name matches the spec exactly.
        with tfv1.name_scope(None):
            return tfv1.placeholder(tensor_spec.dtype, shape=tensor_spec.shape, name=tensor_spec.name)
    assert "Placeholder" in existing.op.type, "Tensor {} exists but is not a placeholder!".format(name)
    assert tensor_spec.is_compatible_with(existing), "Tensor {} exists but is not compatible with the signature!".format(existing)
    return existing
|
def set_figure_params(self, scanpy=True, dpi=80, dpi_save=150, frameon=True, vector_friendly=True, fontsize=14, color_map=None, format="pdf", transparent=False, ipython_format="png2x", ):
    """Set resolution/size, styling and format of figures.

    Parameters
    scanpy : `bool`, optional (default: `True`)
        Init default values for ``matplotlib.rcParams`` suited for Scanpy.
    dpi : `int`, optional (default: `80`)
        Resolution of rendered figures - this influences the size of figures in notebooks.
    dpi_save : `int`, optional (default: `150`)
        Resolution of saved figures. This should typically be higher to achieve
        publication quality.
    frameon : `bool`, optional (default: `True`)
        Add frames and axes labels to scatter plots.
    vector_friendly : `bool`, optional (default: `True`)
        Plot scatter plots using `png` backend even when exporting as `pdf` or `svg`.
    fontsize : `int`, optional (default: 14)
        Set the fontsize for several `rcParams` entries. Ignored if `scanpy=False`.
    color_map : `str`, optional (default: `None`)
        Convenience method for setting the default color map. Ignored if `scanpy=False`.
    format : {'png', 'pdf', 'svg', etc.}, optional (default: 'pdf')
        This sets the default format for saving figures: `file_format_figs`.
    transparent : `bool`, optional (default: `False`)
        Save figures with transparent background. Sets
        `rcParams['savefig.transparent']`.
    ipython_format : list of `str`, optional (default: 'png2x')
        Only concerns the notebook/IPython environment; see
        `IPython.core.display.set_matplotlib_formats` for more details.
    """
    try:
        import IPython
        IPython.core.display.set_matplotlib_formats(ipython_format)
    except Exception:
        # Bug fix: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt. IPython remains optional, so only ordinary
        # exceptions are ignored here.
        pass
    from matplotlib import rcParams
    self._vector_friendly = vector_friendly
    self.file_format_figs = format
    if dpi is not None:
        rcParams["figure.dpi"] = dpi
    if dpi_save is not None:
        rcParams["savefig.dpi"] = dpi_save
    if transparent is not None:
        rcParams["savefig.transparent"] = transparent
    if scanpy:
        from .plotting._rcmod import set_rcParams_scanpy
        set_rcParams_scanpy(fontsize=fontsize, color_map=color_map)
    self._frameon = frameon
|
def reverse_default(self, subid, ipaddr, params=None):
    '''/v1/server/reverse_default_ipv4
    POST - account
    Set a reverse DNS entry for an IPv4 address of a virtual machine to
    the original setting. Upon success, DNS changes may take 6-12 hours
    to become active.
    Link: https://www.vultr.com/api/#server_reverse_default_ipv4'''
    merged = update_params(params, {'SUBID': subid, 'ip': ipaddr})
    return self.request('/v1/server/reverse_default_ipv4', merged, 'POST')
|
def unregister_namespace(self, namespace):
    """Unregister a namespace.

    :param namespace: Namespace tag.
    :type namespace: str
    :raises NoRegisteredError: when the namespace was never registered.
    """
    try:
        del self._namespaces[namespace]
    except KeyError:
        raise NoRegisteredError("Namespace '{0}' is not registered on loader.".format(namespace))
|
def mtotal_from_tau0_tau3(tau0, tau3, f_lower, in_seconds=False):
    r"""Return the total mass from :math:`\tau_0, \tau_3`.

    By default the result is converted from seconds to solar-mass units;
    pass in_seconds=True to keep seconds.
    """
    mtotal = (tau3 / _a3(f_lower)) / (tau0 / _a0(f_lower))
    if in_seconds:
        return mtotal
    # convert back to solar mass units
    return mtotal / lal.MTSUN_SI
|
def add_point_labels(self, points, labels, italic=False, bold=True, font_size=None, text_color='k', font_family=None, shadow=False, show_points=True, point_color='k', point_size=5, name=None):
    """Create a point actor with one label from *labels* assigned to each
    point.

    Parameters
    points : np.ndarray
        n x 3 numpy array of points.
    labels : list
        List of labels. Must be the same length as points.
    italic : bool, optional
        Italicises the labels. Default False.
    bold : bool, optional
        Bolds the labels. Default True.
    font_size : float, optional
        Size of the label font. Defaults to the global rcParams font size.
    text_color : string or 3 item list, optional, defaults to black
        Color of text. Either a string, rgb list, or hex color string,
        e.g. 'white', 'w', [1, 1, 1] or '#FFFFFF'.
    font_family : string, optional
        Font family. Must be either courier, times, or arial. Defaults to
        the global rcParams font family.
    shadow : bool, optional
        Adds a black shadow to the text. Defaults to False.
    show_points : bool, optional
        Controls if points are visible. Default True.
    point_color : string or 3 item list, optional, defaults to black
        Color of points (if visible); same formats as text_color.
    point_size : float, optional
        Size of points (if visible).
    name : str, optional
        The name for the added actor so that it can be easily updated. If
        an actor of this name already exists in the rendering window, it
        will be replaced by the new actor.

    Returns
    labelMapper : vtk.vtkLabeledDataMapper
        VTK label mapper. Can be used to change properties of the labels.
    """
    # Fall back to global rcParams for unspecified font settings.
    if font_family is None:
        font_family = rcParams['font']['family']
    if font_size is None:
        font_size = rcParams['font']['size']
    if len(points) != len(labels):
        raise Exception('There must be one label for each point')
    vtkpoints = vtki.PolyData(points)
    # Attach the labels to the point data as a VTK string array.
    vtklabels = vtk.vtkStringArray()
    vtklabels.SetName('labels')
    for item in labels:
        vtklabels.InsertNextValue(str(item))
    vtkpoints.GetPointData().AddArray(vtklabels)
    # create label mapper
    labelMapper = vtk.vtkLabeledDataMapper()
    labelMapper.SetInputData(vtkpoints)
    textprop = labelMapper.GetLabelTextProperty()
    textprop.SetItalic(italic)
    textprop.SetBold(bold)
    textprop.SetFontSize(font_size)
    textprop.SetFontFamily(parse_font_family(font_family))
    textprop.SetColor(parse_color(text_color))
    textprop.SetShadow(shadow)
    # Label each point with the string stored in the 'labels' field array.
    labelMapper.SetLabelModeToLabelFieldData()
    labelMapper.SetFieldDataName('labels')
    labelActor = vtk.vtkActor2D()
    labelActor.SetMapper(labelMapper)
    # add points
    if show_points:
        style = 'points'
    else:
        # Render an invisible surface so only the labels show.
        style = 'surface'
    self.add_mesh(vtkpoints, style=style, color=point_color, point_size=point_size)
    self.add_actor(labelActor, reset_camera=False, name=name)
    return labelMapper
|
def DirectoryStimuliFactory(loader):
    """Build a Categories object from the image folder of an experiment.

    Takes the input path to the images folder of an experiment and
    generates automatically the category/filenumber list needed to
    construct an appropriate _categories object.

    Parameters:
        loader : Loader object which contains
            impath : string
                path to the input, i.e. image-, files of the experiment.
                All subfolders in that path will be treated as categories.
                If no subfolders are present, category 1 will be assigned
                and all files in the folder are considered input images.
                Images have to end in '.png'.
            ftrpath : string
                path to the feature folder. It is expected that the folder
                structure corresponds to the structure in impath, i.e.
                ftrpath/category/featurefolder/featuremap.mat
                Furthermore, features are assumed to be the same for all
                categories.
    """
    impath = loader.impath
    ftrpath = loader.ftrpath
    # checks whether user has reading permission for the path
    assert os.access(impath, os.R_OK)
    assert os.access(ftrpath, os.R_OK)
    # EXTRACTING IMAGE NAMES
    img_per_cat = {}
    # extract only directories in the given folder
    subfolders = [name for name in os.listdir(impath) if os.path.isdir(os.path.join(impath, name))]
    # if there are no subfolders, walk through files. Take 1 as key for the
    # categories object
    if not subfolders:
        [_, _, files] = next(os.walk(os.path.join(impath)))
        # this only takes entries that end with '.png'; the text between the
        # first '_' and the extension is parsed as the image number
        entries = {1: [int(cur_file[cur_file.find('_') + 1:-4]) for cur_file in files if cur_file.endswith('.png')]}
        img_per_cat.update(entries)
        subfolders = ['']
    # if there are subfolders, walk through them
    else:
        for directory in subfolders:
            [_, _, files] = next(os.walk(os.path.join(impath, directory)))
            # this only takes entries that end with '.png'. Strips ending and
            # considers everything after the first '_' as the imagenumber
            imagenumbers = [int(cur_file[cur_file.find('_') + 1:-4]) for cur_file in files if (cur_file.endswith('.png') & (len(cur_file) > 4))]
            # NOTE(review): subfolder names are assumed to be integer
            # category ids -- int(directory) raises for non-numeric names.
            entries = {int(directory): imagenumbers}
            img_per_cat.update(entries)
        del directory
        del imagenumbers
    # in case subfolders do not exist, '' is appended here.
    _, features, files = next(os.walk(os.path.join(ftrpath, subfolders[0])))
    return Categories(loader, img_per_cat=img_per_cat, features=features)
|
def PureMultiHeadedAttention(x, params, num_heads=8, dropout=0.0, mode='train', **kwargs):
    """Pure transformer-style multi-headed attention.

    Args:
        x: inputs ((q, k, v), mask)
        params: parameters (none)
        num_heads: int: number of attention heads
        dropout: float: dropout rate
        mode: str: 'train' or 'eval'
        **kwargs: other arguments including the rng

    Returns:
        Pure multi-headed attention result (no Dense transforms on input).
    """
    del params
    rng = kwargs.get('rng', None)
    (q, k, v), mask = x
    feature_depth = q.shape[-1]
    assert feature_depth % num_heads == 0
    head_depth = feature_depth // num_heads
    nbatch = np.shape(q)[0]

    def split_heads(t):
        # nbatch, seqlen, feature_depth --> nbatch, num_heads, seqlen, head_depth
        return np.transpose(np.reshape(t, (nbatch, -1, num_heads, head_depth)), (0, 2, 1, 3))

    def join_heads(t):
        # nbatch, num_heads, seqlen, head_depth --> nbatch, seqlen, feature_depth
        return np.reshape(np.transpose(t, (0, 2, 1, 3)), (nbatch, -1, num_heads * head_depth))

    # Split heads, dot-product attention, rejoin heads.
    attended = DotProductAttention(split_heads(q), split_heads(k), split_heads(v), mask, dropout=dropout, mode=mode, rng=rng)
    return join_heads(attended)
|
def t_IDENTIFIER(t):
    # NOTE: in PLY lexers the docstring below IS the token's regular
    # expression -- it is behavior, not documentation, and must not be
    # edited as prose.
    r"[A-Z_a-z][0-9A-Z_a-z]*"
    # Identifiers that collide with a keyword are re-tagged so the parser
    # sees the keyword token type instead of a generic IDENTIFIER.
    if t.value in keywords:
        t.type = t.value
    return t
|
def parse_networks_output(out):
    """Parse the output of the Docker CLI 'docker network ls' and return it
    in a format similar to the Docker API.

    :param out: CLI output.
    :type out: unicode | str
    :return: Parsed result.
    :rtype: list[dict]
    """
    if not out:
        return []
    # Skip the header line, then convert each remaining row.
    data_lines = islice(out.splitlines(), 1, None)
    return [_network_info(line) for line in data_lines]
|
def add_to_current_action(self, controller):
    """Add a controller to the current action.

    The current history entry is a tuple; the controller is appended to it
    in place at the current index.
    """
    self._history[self._index] = self.current_item + (controller,)
|
def as_dict(self) -> Dict[str, Any]:
    """The names and values of all options.

    .. versionadded:: 3.1
    """
    return {opt.name: opt.value() for opt in self._options.values()}
|
def connect(self, peer_address):
    """Client-side UDP connection establishment.

    Connects this object's underlying socket and, when
    do_handshake_on_connect was set during initialization, performs the
    handshake.

    Arguments:
        peer_address - address tuple of server peer
    """
    self._sock.connect(peer_address)
    # Use the resolved address the socket actually connected to
    # (substituted host addrinfo).
    resolved_address = self._sock.getpeername()
    BIO_dgram_set_connected(self._wbio.value, resolved_address)
    assert self._wbio is self._rbio
    if self._do_handshake_on_connect:
        self.do_handshake()
|
def get_file(self, target_path, host_path, note=None, loglevel=logging.DEBUG):
    """Copy a file from the target machine to the host machine.

    @param target_path: path to file in the target
    @param host_path: path to file on the host machine (e.g. copy test)
    @param note: See send()
    @type target_path: string
    @type host_path: string
    @return: True when the copy was attempted, False for unsupported
        delivery methods
    @rtype: boolean
    """
    shutit_global.shutit_global_object.yield_to_draw()
    self.handle_note(note)
    # Only handle for docker initially, return false in case we care
    if self.build['delivery'] != 'docker':
        return False
    # on the host, run:
    # Usage: docker cp [OPTIONS] CONTAINER:PATH LOCALPATH|-
    # Need: host env, container id, path from and path to
    host_session = self.get_shutit_pexpect_session_from_id('host_child').pexpect_child
    command = 'docker cp ' + self.target['container_id'] + ':' + target_path + ' ' + host_path
    self.send(command, shutit_pexpect_child=host_session, expect=self.expect_prompts['ORIGIN_ENV'], check_exit=False, echo=False, loglevel=loglevel)
    self.handle_note_after(note=note)
    return True
|
def save(self, backref_obj=None):
    """Save the cleaned data to the initial object or creating a new one
    (if a `model_class` was provided)."""
    if not self.validated:
        assert self.is_valid()
    # Create a fresh model instance when a model class was supplied but no
    # object is bound yet; otherwise write into the existing object.
    if self._model and not self._obj:
        obj = self._save_new_object(backref_obj)
    else:
        obj = self.save_to(self._obj)
    # Nested forms and form-sets are persisted identically, so one loop
    # handles both collections.
    for children in (self._forms, self._sets):
        for key, child in children.items():
            data = child.save(obj)
            if self._model and not data:
                continue
            set_obj_value(obj, key, data)
    return obj
|
def open(self, baudrate=None, no_reader_thread=False):
    """Opens the device.

    If the device cannot be opened, an exception is thrown.  In that
    case, open() can be called repeatedly to try and open the
    connection.

    :param baudrate: baudrate used for the device.  Defaults to the
        lower-level device default.
    :type baudrate: int
    :param no_reader_thread: Specifies whether or not the automatic reader
        thread should be started.
    :type no_reader_thread: bool

    :return: this object, to allow chained calls.
    """
    self._wire_events()
    try:
        self._device.open(baudrate=baudrate, no_reader_thread=no_reader_thread)
    except BaseException:
        # BUG FIX: the original referenced ``self._unwire_events`` without
        # calling it, so event handlers were never detached when the
        # underlying open failed.
        self._unwire_events()
        raise
    return self
|
def _scan_pth_files ( dir_paths ) :
"""Given an iterable of directory paths , yield paths to all . pth files within ."""
|
for dir_path in dir_paths :
if not os . path . exists ( dir_path ) :
continue
pth_filenames = ( f for f in os . listdir ( dir_path ) if f . endswith ( '.pth' ) )
for pth_filename in pth_filenames :
yield os . path . join ( dir_path , pth_filename )
|
def _onDecorator(self, name, line, pos, absPosition):
    "Memorizes a function or a class decorator"
    # A class or a function must be on the top of the stack
    self.objectsStack[-1].decorators.append(
        Decorator(name, line, pos, absPosition))
    return
|
def get_plan_from_dual(alpha, beta, C, regul):
    """Retrieve optimal transportation plan from optimal dual potentials.

    Parameters
    ----------
    alpha : array, shape = len(a)
    beta : array, shape = len(b)
        Optimal dual potentials.
    C : array, shape = len(a) x len(b)
        Ground cost matrix.
    regul : Regularization object
        Should implement a delta_Omega(X) method.

    Returns
    -------
    T : array, shape = len(a) x len(b)
        Optimal transportation plan.
    """
    # Broadcast alpha down the rows and beta across the columns.
    residual = alpha[:, np.newaxis] + beta - C
    # delta_Omega returns a (value, gradient) pair; the gradient is the plan.
    return regul.delta_Omega(residual)[1]
|
def show_vcs_output_total_nodes_in_cluster ( self , ** kwargs ) :
"""Auto Generated Code"""
|
config = ET . Element ( "config" )
show_vcs = ET . Element ( "show_vcs" )
config = show_vcs
output = ET . SubElement ( show_vcs , "output" )
total_nodes_in_cluster = ET . SubElement ( output , "total-nodes-in-cluster" )
total_nodes_in_cluster . text = kwargs . pop ( 'total_nodes_in_cluster' )
callback = kwargs . pop ( 'callback' , self . _callback )
return callback ( config )
|
def _get_config(**kwargs):
    '''Return configuration'''
    # Defaults: sealed-box crypto using the master's NaCl keypair files.
    # NOTE(review): kwargs['opts'] is presumably the Salt __opts__ dict;
    # confirm with callers.
    config = {
        'box_type': 'sealedbox',
        'sk': None,
        'sk_file': os.path.join(kwargs['opts'].get('pki_dir'), 'master/nacl'),
        'pk': None,
        'pk_file': os.path.join(kwargs['opts'].get('pki_dir'), 'master/nacl.pub'),
    }
    config_key = '{0}.config'.format(__virtualname__)
    try:
        config.update(__salt__['config.get'](config_key, {}))
    except (NameError, KeyError) as e:
        # likely using salt-run so fallback to __opts__
        config.update(kwargs['opts'].get(config_key, {}))
    # pylint: disable=C0201
    # Explicit keyword arguments override any configured values.
    for k in set(config.keys()) & set(kwargs.keys()):
        config[k] = kwargs[k]
    return config
|
def encodeStringForB64Header(s):
    """Base64-encode *s* for use in an HTTP header value.

    HTTP headers can't contain newlines, so the line breaks that
    ``base64.encodebytes`` inserts every 76 characters are stripped.

    :param s: text or bytes payload; text is UTF-8 encoded first.
    :return: newline-free base64-encoded ``bytes``.
    """
    if isinstance(s, str):
        s = s.encode()
    # base64.encodestring was deprecated and removed in Python 3.9;
    # encodebytes is the supported equivalent with identical output.
    return base64.encodebytes(s).strip().replace(b'\n', b'')
|
def delete_item(key):
    """Delete cached file if present"""
    # Resolve the cache entry relative to the module's cache directory.
    target = os.path.join(CURRENT_DIR, key)
    if os.path.isfile(target):
        os.remove(target)
|
def parseExtensionArgs(self, args, is_openid1, strict=False):
    """Set the state of this request to be that expressed in these
    PAPE arguments

    @param args: The PAPE arguments without a namespace
    @param strict: Whether to raise an exception if the input is
        out of spec or otherwise malformed.  If strict is false,
        malformed input will be ignored.
    @param is_openid1: Whether the input should be treated as part
        of an OpenID1 request
    @rtype: None
    @raises ValueError: When the max_auth_age is not parseable as
        an integer
    """
    # preferred_auth_policies is a space-separated list of policy URIs
    self.preferred_auth_policies = []
    policies_str = args.get('preferred_auth_policies')
    if policies_str:
        for uri in policies_str.split(' '):
            # De-duplicate while preserving the sender's ordering.
            if uri not in self.preferred_auth_policies:
                self.preferred_auth_policies.append(uri)
    # max_auth_age is base-10 integer number of seconds
    max_auth_age_str = args.get('max_auth_age')
    self.max_auth_age = None
    if max_auth_age_str:
        try:
            self.max_auth_age = int(max_auth_age_str)
        except ValueError:
            # Non-strict mode tolerates an unparseable age (leaves None).
            if strict:
                raise
    # Parse auth level information
    preferred_auth_level_types = args.get('preferred_auth_level_types')
    if preferred_auth_level_types:
        aliases = preferred_auth_level_types.strip().split()
        for alias in aliases:
            key = 'auth_level.ns.%s' % (alias,)
            try:
                uri = args[key]
            except KeyError:
                if is_openid1:
                    # OpenID1 falls back to the well-known alias table.
                    uri = self._default_auth_level_aliases.get(alias)
                else:
                    uri = None
            if uri is None:
                # Undefined alias: fatal in strict mode, skipped otherwise.
                if strict:
                    raise ValueError('preferred auth level %r is not '
                                     'defined in this message' % (alias,))
            else:
                self.addAuthLevel(uri, alias)
|
def pink(N, depth=80):
    """N-length vector with (approximate) pink noise

    pink noise has 1/f PSD
    """
    source = iterpink(depth)
    # Draw N successive samples from the pink-noise generator.
    return [next(source) for _ in range(N)]
|
def get_xmlns_string(self, ns_uris=None, sort=False,
                     preferred_prefixes_only=True, delim="\n"):
    """Generates XML namespace declarations for namespaces in this
    set.  It must be suitable for use in an actual XML document,
    so an exception is raised if this can't be done, e.g. if it would
    have more than one default namespace declaration.

    If ``preferred_prefixes_only`` is ``True`` and a namespace's prefix
    preference is to be a default namespace, a default declaration will
    be used if possible.  If that's not possible, a prefix will be
    chosen (is this a good idea?).  If a default declaration can't be used
    and no other prefixes are defined, an exception is raised.

    Args:
        ns_uris (iterable): If non-None, it should be an iterable over
            namespace URIs.  Only the given namespaces will occur in the
            returned string.  If None, all namespaces are included.
        sort (bool): If True, the string is constructed from URIs in sorted
            order.
        preferred_prefixes_only (bool): Whether to include only the
            preferred prefix or all of them, for each namespace.
        delim (str): The delimiter to use between namespace declarations.
            Should be some kind of whitespace.

    Returns:
        str: A string in the following format:
        ``xmlns:foo="bar"<delim>xmlns:foo2="bar2"<delim>...``

    Raises:
        NamespaceNotFoundError: If ``ns_uris`` is given and contains any
            URIs not in this set.
        TooManyDefaultNamespacesError: If too many namespaces didn't have
            a prefix.  The algorithm is very simple for deciding whose
            default preference is honored: the first default preference
            encountered gets to be default.  Any subsequent namespaces
            without any prefixes will cause this error.
    """
    if ns_uris is None:
        ns_uris = self.namespace_uris
    if sort:
        ns_uris = sorted(ns_uris)
    have_default = False
    # Flag for default xmlns entry.
    xmlns_entries = []
    # Stores all the xmlns:prefix=uri entries.
    for ns_uri in ns_uris:
        # Raises NamespaceNotFoundError for unknown URIs.
        ni = self.__lookup_uri(ns_uri)
        if preferred_prefixes_only:
            if ni.preferred_prefix is not None:
                xmlns = 'xmlns:{0.preferred_prefix}="{0.uri}"'.format(ni)
                xmlns_entries.append(xmlns)
        else:
            # Emit a declaration for every registered prefix.
            xmlns = 'xmlns:{0}="{1.uri}"'
            xmlns_entries.extend(xmlns.format(prefix, ni) for prefix in ni.prefixes)
        if ni.preferred_prefix is None:
            if have_default:
                # Already have a default namespace; try to choose a prefix
                # for this one from the set of registered prefixes.
                if len(ni.prefixes) == 0:
                    raise TooManyDefaultNamespacesError(ni.uri)
                elif preferred_prefixes_only:
                    prefix = next(iter(ni.prefixes))
                    xmlns = 'xmlns:{0}="{1.uri}"'.format(prefix, ni)
                    xmlns_entries.append(xmlns)
                # else, we already declared some prefixes for this
                # namespace, so don't worry about our inability to use this
                # as a default namespace.
            else:
                # First prefix-less namespace becomes the default.
                xmlns = 'xmlns="{0.uri}"'.format(ni)
                xmlns_entries.append(xmlns)
                have_default = True
    # Trailing delimiter is intentional (matches original behavior).
    xmlns_str = delim.join(xmlns_entries) + delim
    return xmlns_str
|
def _start_services_on_ads(ads):
    """Starts long running services on multiple AndroidDevice objects.

    If any one AndroidDevice object fails to start services, cleans up all
    existing AndroidDevice objects and their services.

    Args:
        ads: A list of AndroidDevice objects whose services to start.
    """
    running_ads = []
    for ad in ads:
        running_ads.append(ad)
        # Per-device opt-out of logcat collection via a device attribute.
        start_logcat = not getattr(ad, KEY_SKIP_LOGCAT, DEFAULT_VALUE_SKIP_LOGCAT)
        try:
            ad.services.register(SERVICE_NAME_LOGCAT, logcat.Logcat,
                                 start_service=start_logcat)
        except Exception:
            is_required = getattr(ad, KEY_DEVICE_REQUIRED,
                                  DEFAULT_VALUE_DEVICE_REQUIRED)
            if is_required:
                # A required device failed: tear down every device started
                # so far (including this one) and propagate the error.
                ad.log.exception('Failed to start some services, abort!')
                destroy(running_ads)
                raise
            else:
                # Optional device: log and continue with the rest.
                ad.log.exception('Skipping this optional device because some '
                                 'services failed to start.')
|
def run(self, module, post_check):
    '''Execute the configured source code in a module and run any post
    checks.

    Args:
        module (Module): a module to execute the configured code in.
        post_check (callable): a function that can raise an exception
            if expected post-conditions are not met after code execution.
    '''
    try:
        # Simulate the sys.path behaviour described here:
        # https://docs.python.org/2/library/sys.html#sys.path
        # Snapshot CWD/sys.path/sys.argv so they can be restored below.
        _cwd = os.getcwd()
        _sys_path = list(sys.path)
        _sys_argv = list(sys.argv)
        sys.path.insert(0, os.path.dirname(self._path))
        sys.argv = [os.path.basename(self._path)] + self._argv
        exec(self._code, module.__dict__)
        post_check()
    except Exception as e:
        self._failed = True
        self._error_detail = traceback.format_exc()
        # Summarise the innermost traceback frame for a compact report.
        _exc_type, _exc_value, exc_traceback = sys.exc_info()
        filename, line_number, func, txt = traceback.extract_tb(exc_traceback)[-1]
        self._error = "%s\nFile \"%s\", line %d, in %s:\n%s" % (
            str(e), os.path.basename(filename), line_number, func, txt)
    finally:
        # undo sys.path, CWD fixups
        os.chdir(_cwd)
        sys.path = _sys_path
        sys.argv = _sys_argv
    self.ran = True
|
def _update_known_atts(self, **kwargs):
    """Updates instance attributes with supplied keyword arguments."""
    for name, value in kwargs.items():
        if name not in ATTR_KEYS:
            # Reject unrecognised keyword arguments outright.
            raise SyntaxWarning('Unknown argument: {}'.format(name))
        if not value:
            # A falsey value means "remove this attribute".
            delattr(self, name)
        else:
            setattr(self, name, value)
|
def save(file_name, content):
    """Save content to a file"""
    # Write the content as UTF-8 text and return the path that was used.
    with open(file_name, "w", encoding="utf-8") as handle:
        handle.write(content)
        written_name = handle.name
    return written_name
|
def update_invoice_item(self, invoice_item_id, invoice_item_dict):
    """Updates an invoice item

    :param invoice_item_id: the invoice item id
    :param invoice_item_dict: dict
    :return: dict
    """
    # Thin wrapper: issue a PUT against the invoice-items resource.
    return self._create_put_request(
        resource=INVOICE_ITEMS,
        billomat_id=invoice_item_id,
        send_data=invoice_item_dict)
|
def note_to_chord(notes):
    """Convert note list to chord list

    :param list[str] notes: list of note arranged from lower note. ex) ["C", "Eb", "G"]
    :rtype: list[pychord.Chord]
    :return: list of chord
    """
    if not notes:
        raise ValueError("Please specify notes which consist a chord.")
    bass = notes[0]
    # Pair each rotation's root with the interval pattern it produces.
    candidates = [
        (rotation[0], notes_to_positions(rotation, rotation[0]))
        for rotation in get_all_rotated_notes(notes)
    ]
    chords = []
    for candidate_root, positions in candidates:
        quality = find_quality(positions)
        if quality is None:
            continue
        # Use slash-chord notation when the recognised root is not the bass.
        if candidate_root == bass:
            name = "{}{}".format(bass, quality)
        else:
            name = "{}{}/{}".format(candidate_root, quality, bass)
        chords.append(Chord(name))
    return chords
|
def _get_image(self):
    """Get a PIL Image instance of this file.

    The image is cached to avoid the file needing to be read again if the
    function is called again.
    """
    if not hasattr(self, '_image_cache'):
        from easy_thumbnails.source_generators import pil_image
        # NOTE(review): this assigns ``self.image``, presumably a property
        # whose setter stores the value in ``self._image_cache`` (which is
        # returned below).  If ``image`` were a plain attribute this would
        # raise AttributeError on first call -- confirm against the class.
        self.image = pil_image(self)
    return self._image_cache
|
async def dump_tuple(self, elem, elem_type, params=None, obj=None):
    """Dumps tuple of elements to the writer.

    :param elem: tuple value to serialise; must match the field specs.
    :param elem_type: tuple type descriptor; ``f_specs()`` yields per-field specs.
    :param params: optional parameters; params[0] overrides the field specs,
        the remainder is forwarded to each field dump.
    :param obj: optional destination container.
    :return: list of dumped field values.
    :raises ValueError: when the tuple length does not match the specs.
    """
    if len(elem) != len(elem_type.f_specs()):
        raise ValueError('Fixed size tuple has not defined size: %s' % len(elem_type.f_specs()))
    elem_fields = params[0] if params else None
    if elem_fields is None:
        elem_fields = elem_type.f_specs()
    # NOTE(review): ``x`` is presumably a serialisation helper module
    # imported at file level (providing get_elem) -- confirm.
    obj = [] if obj is None else x.get_elem(obj)
    # NOTE: the loop variable deliberately shadows ``elem``.
    for idx, elem in enumerate(elem):
        try:
            # Track the index so serialization errors can be located.
            self.tracker.push_index(idx)
            fvalue = await self._dump_field(elem, elem_fields[idx], params[1:] if params else None)
            obj.append(fvalue)
            self.tracker.pop()
        except Exception as e:
            raise helpers.ArchiveException(e, tracker=self.tracker) from e
    return obj
|
def get_device(self, device_id):
    """Return specified device.

    Returns a Command.
    """
    # Wrap the raw API result in a Device before handing it to the caller.
    return Command('get', [ROOT_DEVICES, device_id],
                   process_result=lambda result: Device(result))
|
def log_relative_gauss(z):
    """log_relative_gauss

    Returns (exp(logphi - logPhi), logPhi, tail-flag), clamping to
    asymptotic values in the far tails.
    """
    # Far-left tail: ratio ~ 1, log-CDF effectively -inf.
    if z < -6:
        return 1, -1.0e12, -1
    # Far-right tail: ratio ~ 0, log-CDF ~ 0.
    if z > 6:
        return 0, 0, 1
    log_pdf = -0.5 * (z * z + l2p)
    log_cdf = np.log(.5 * special.erfc(-z / sq2))
    ratio = np.exp(log_pdf - log_cdf)
    return ratio, log_cdf, 0
|
def spawn(cls, argv, cwd=None, env=None, dimensions=(24, 80)):
    """Start the given command in a child process in a pseudo terminal.

    This does all the setting up the pty, and returns an instance of
    PtyProcess.

    Dimensions of the psuedoterminal used for the subprocess can be
    specified as a tuple (rows, cols), or the default (24, 80) will be
    used.
    """
    if isinstance(argv, str):
        argv = shlex.split(argv, posix=False)
    if not isinstance(argv, (list, tuple)):
        raise TypeError("Expected a list or tuple for argv, got %r" % argv)
    # Shallow copy of argv so we can modify it
    argv = argv[:]
    command = argv[0]
    env = env or os.environ
    path = env.get('PATH', os.defpath)
    # Resolve the executable on PATH before handing it to the PTY layer.
    command_with_path = which(command, path=path)
    if command_with_path is None:
        raise FileNotFoundError('The command was not found or was not ' +
                                'executable: %s.' % command)
    command = command_with_path
    argv[0] = command
    # Arguments only (note the leading space expected by the PTY backend).
    cmdline = ' ' + subprocess.list2cmdline(argv[1:])
    cwd = cwd or os.getcwd()
    # PTY takes (cols, rows); dimensions is (rows, cols).
    proc = PTY(dimensions[1], dimensions[0])
    # Create the environment string (NUL-separated, NUL-terminated).
    envStrs = []
    for (key, value) in env.items():
        envStrs.append('%s=%s' % (key, value))
    env = '\0'.join(envStrs) + '\0'
    if PY2:
        # The backend expects unicode on Python 2.
        command = _unicode(command)
        cwd = _unicode(cwd)
        cmdline = _unicode(cmdline)
        env = _unicode(env)
    if len(argv) == 1:
        proc.spawn(command, cwd=cwd, env=env)
    else:
        proc.spawn(command, cwd=cwd, env=env, cmdline=cmdline)
    inst = cls(proc)
    inst._winsize = dimensions
    # Set some informational attributes
    inst.argv = argv
    if env is not None:
        inst.env = env
    if cwd is not None:
        inst.launch_dir = cwd
    return inst
|
def get_rule_option(self, rule_name_or_id, option_name):
    """Returns the value of a given option for a given rule.  LintConfigErrors
    will be raised if the rule or option don't exist."""
    # _get_option raises for unknown rules/options, so no checks needed here.
    return self._get_option(rule_name_or_id, option_name).value
|
def to_lower(self, string):
    """Helper function to transform strings to lower case.

    :param string: value to normalise; anything without a ``lower()``
        method (e.g. None or an int) yields the empty string.
    :return: lower-cased str, or "" for non-string input.
    """
    # The original returned from a ``finally`` block, which silently
    # swallowed *any* exception raised by ``lower()`` (returning None).
    # Catch only the expected AttributeError and let the rest propagate.
    try:
        return string.lower()
    except AttributeError:
        return ""
|
def save(self, *args, **kwargs):
    """Set the description field on save."""
    # When auto-generation is enabled, derive the description from the
    # instance's content with any markup stripped.
    if self.gen_description:
        self.description = strip_tags(self.description_from_content())
    super(MetaData, self).save(*args, **kwargs)
|
def frames_iter(socket, tty):
    """Return a generator of frames read from socket.  A frame is a tuple where
    the first item is the stream number and the second item is a chunk of data.

    If the tty setting is enabled, the streams are multiplexed into the stdout
    stream.
    """
    if not tty:
        return frames_iter_no_tty(socket)
    # With a TTY everything arrives multiplexed; tag each chunk as stdout.
    return ((STDOUT, frame) for frame in frames_iter_tty(socket))
|
def _call ( callable_obj , arg_names , namespace ) :
"""Actually calls the callable with the namespace parsed from the command
line .
Args :
callable _ obj : a callable object
arg _ names : name of the function arguments
namespace : the namespace object parsed from the command line"""
|
arguments = { arg_name : getattr ( namespace , arg_name ) for arg_name in arg_names }
return callable_obj ( ** arguments )
|
def format_string(string, context):
    """String-template format a string:

    >>> format_string('$foo and ${foo}s', dict(foo=42))
    '42 and 42s'

    This does not do any attribute lookup etc.  For more advanced string
    formattings have a look at the `werkzeug.template` module.

    :param string: the format string.
    :param context: a dict with the variables to insert.
    """
    def replace(match):
        # Either the bare ($foo) or the braced (${foo}) group matched.
        value = context[match.group(1) or match.group(2)]
        if isinstance(value, string_types):
            return value
        # Coerce non-strings to the same string type as the template.
        return type(string)(value)
    return _format_re.sub(replace, string)
|
def list(self, request, *args, **kwargs):
    """Filter services by type

    It is possible to filter services by their types. Example:
    /api/services/?service_type=DigitalOcean&service_type=OpenStack
    """
    # Filtering itself is handled by the configured filter backends; this
    # override exists to document the endpoint's behavior.
    return super(ServicesViewSet, self).list(request, *args, **kwargs)
|
def __delete_all_bgedges_between_two_vertices(self, vertex1, vertex2):
    """Deletes all edges between two supplied vertices

    :param vertex1: a first out of two vertices edges between which are to be deleted
    :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected
    :param vertex2: a second out of two vertices edges between which are to be deleted
    :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected
    :return: ``None``, performs inplace changes
    """
    # Snapshot matching edges first so deletion does not disturb iteration.
    doomed = [
        (key, data)
        for _, other, key, data in self.bg.edges(nbunch=vertex1, keys=True, data=True)
        if other == vertex2
    ]
    for key, data in doomed:
        edge = BGEdge(vertex1=vertex1, vertex2=vertex2,
                      multicolor=data["attr_dict"]["multicolor"])
        self.__delete_bgedge(edge, key=key)
|
def heightmap_dig_bezier(
    hm: np.ndarray,
    px: Tuple[int, int, int, int],
    py: Tuple[int, int, int, int],
    startRadius: float,
    startDepth: float,
    endRadius: float,
    endDepth: float,
) -> None:
    """Carve a path along a cubic Bezier curve.

    Both radius and depth can vary linearly along the path.

    Args:
        hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions.
        px (Sequence[int]): The 4 `x` coordinates of the Bezier curve.
        py (Sequence[int]): The 4 `y` coordinates of the Bezier curve.
        startRadius (float): The starting radius size.
        startDepth (float): The starting depth.
        endRadius (float): The ending radius size.
        endDepth (float): The ending depth.
    """
    # Delegate directly to the libtcod C implementation; the heightmap is
    # passed as a cdata view so it is modified in place.
    lib.TCOD_heightmap_dig_bezier(
        _heightmap_cdata(hm), px, py, startRadius, startDepth, endRadius, endDepth,
    )
|
def process_response(self, response):
    """Store response headers."""
    # Keep the headers as a deterministically ordered list of
    # (name, value) tuples.
    self.response_headers = sorted(response.headers.items())
|
def Put(self, key, obj):
    """Add the object to the cache."""
    # Evict any stale entry for this key from the age list first.
    stale = self._hash.pop(key, None)
    if stale:
        self._age.Unlink(stale)
    # Insert a fresh node at the young end, then trim the cache to size.
    fresh = Node(key=key, data=obj)
    self._hash[key] = fresh
    self._age.AppendNode(fresh)
    self.Expire()
    return key
|
def set_node_text(self, node, text):
    """Set text value as sole Text child node of element; any existing
    Text nodes are removed"""
    # Remove any existing Text node children
    # NOTE(review): this removes children while iterating
    # get_node_children(node); safe only if that call returns a snapshot
    # (not a live node list) -- confirm against the implementation.
    for child in self.get_node_children(node):
        if child.nodeType == xml.dom.Node.TEXT_NODE:
            self.remove_node_child(node, child, True)
    # None means "leave the element with no text child at all".
    if text is not None:
        text_node = self.new_impl_text(text)
        self.add_node_child(node, text_node)
|
def csvpretty(csvfile: csvfile = sys.stdin):
    """Pretty print a CSV file."""
    # NOTE(review): the annotation references the parameter's own name --
    # presumably a shellish argument-type helper defined at module level;
    # confirm.  Rows are parsed with the stdlib csv reader and tabulated.
    shellish.tabulate(csv.reader(csvfile))
|
def _prepare_request_data(self, eopatch, bbox, time_interval):
    """Collects all parameters used for DataRequest, each one is taken either
    from initialization parameters or from EOPatch"""
    service_type = ServiceType(self._get_parameter('service_type', eopatch))
    if time_interval is None:
        time_interval = self._get_parameter('time_interval', eopatch)
    # WMS sizes requests in pixels (width/height); otherwise resolution
    # units (resx/resy) are used.
    if service_type is ServiceType.WMS:
        size_x_name, size_y_name = 'width', 'height'
    else:
        size_x_name, size_y_name = 'resx', 'resy'
    # Explicit arguments win over values stored on the EOPatch.
    return {
        'layer': self.layer,
        'bbox': bbox if bbox is not None else self._get_parameter('bbox', eopatch),
        'time': time_interval,
        'time_difference': self._get_parameter('time_difference', eopatch),
        'maxcc': self._get_parameter('maxcc', eopatch),
        'image_format': self.image_format,
        'custom_url_params': self.custom_url_params,
        'data_source': self.data_source,
        'instance_id': self.instance_id,
        size_x_name: self._get_parameter('size_x', eopatch),
        size_y_name: self._get_parameter('size_y', eopatch),
    }, service_type
|
def get_group_partition(group, partition_count):
    """Given a group name, return the partition number of the consumer offset
    topic containing the data associated to that group."""
    # Reproduce Java's String.hashCode: h = 31*h + c over the characters,
    # kept within 32 bits and then reinterpreted as a signed value.
    acc = 0
    for ch in group:
        acc = (31 * acc + ord(ch)) & 0xFFFFFFFF
    signed = ((acc + 0x80000000) & 0xFFFFFFFF) - 0x80000000
    return abs(signed) % partition_count
|
def check(text):
    """Suggest the preferred forms."""
    err = "redundancy.wallace"
    msg = "Redundancy. Use '{}' instead of '{}'."
    # Each entry is [preferred form, [redundant phrasings to flag]].
    redundancies = [
        ["rectangular", ["rectangular in shape"]],
        ["audible", ["audible to the ear"]],
    ]
    return preferred_forms_check(text, redundancies, err, msg)
|
def compose(*parameter_functions):
    """Composes multiple modification functions in order.

    Args:
        *parameter_functions: The functions to compose.

    Returns:
        A parameter modification function that consists of applying all the
        provided functions.
    """
    def apply_all(var_name, variable, phase):
        # Thread the variable through each function, left to right.
        result = variable
        for modify in parameter_functions:
            result = modify(var_name, result, phase)
        return result
    return apply_all
|
def post(self, endpoint, json=None, params=None, **kwargs):
    """POST to DHIS2

    :param endpoint: DHIS2 API endpoint
    :param json: HTTP payload
    :param params: HTTP parameters
    :return: requests.Response object
    """
    # A 'data' keyword argument, when supplied, overrides the json payload.
    payload = kwargs.get('data', json)
    return self._make_request('post', endpoint, data=payload, params=params)
|
def get_filenames(cls, mapreduce_state):
    """See parent class."""
    shards = mapreduce_state.mapreduce_spec.mapper.shard_count
    # Build a shards x shards grid; row y will hold output file y of
    # every shard.
    grid = [[None] * shards for _ in range(shards)]
    shard_states = model.ShardState.find_all_by_mapreduce_state(mapreduce_state)
    for column, shard_state in enumerate(shard_states):
        per_shard = shard_state.writer_state["shard_filenames"]
        for row in range(shards):
            grid[row][column] = per_shard[row]
    return grid
|
def setChoice(key, *args):
    '''check choice'''
    # Build a schema validator that accepts only values contained in *args*;
    # the error message names the offending key and lists the valid choices.
    return And(lambda n: n in args, error=SCHEMA_RANGE_ERROR % (key, str(args)))
|
def iter_methods(self):
    """Iterate over stored functions and instance methods

    Yields:
        Instance methods or function objects
    """
    for wrkey, instance in self.iter_instances():
        func, _obj_id = wrkey
        if func == 'function':
            # Plain functions are stored directly under their key.
            yield self[wrkey]
        else:
            # Re-bind the stored function to its owning instance.
            yield getattr(instance, func.__name__)
|
def create_weapon_layer(weapon, hashcode, isSecond=False):
    """Creates the layer for weapons."""
    # Parse the weapon's .pgn sprite file from the package's pgn directory;
    # the second weapon of a pair is mirrored via invert.
    return pgnreader.parse_pagan_file(
        ('%s%spgn%s' % (PACKAGE_DIR, os.sep, os.sep)) + weapon + '.pgn',
        hashcode, sym=False, invert=isSecond)
|
def has_code(
    state,
    text,
    incorrect_msg="Check the {ast_path}. The checker expected to find {text}.",
    fixed=False,
):
    """Test whether the student code contains text.

    Args:
        state: State instance describing student and solution code.  Can be
            omitted if used with Ex().
        text: text that student code must contain.  Can be a regex pattern or
            a simple string.
        incorrect_msg: feedback message if text is not in student code.
        fixed: whether to match text exactly, rather than using regular
            expressions.

    Note:
        Functions like ``check_node`` focus on certain parts of code.
        Using these functions followed by ``has_code`` will only look
        in the code being focused on.

    :Example:
        If the student code is::

            SELECT a FROM b WHERE id < 100

        Then the first test below would (unfortunately) pass, but the second
        would fail::

            # contained in student code
            Ex().has_code(text="id < 10")
            # the $ means that you are matching the end of a line
            Ex().has_code(text="id < 10$")

        By setting ``fixed=True``, you can search for fixed strings::

            # without fixed=True, '*' matches any character
            Ex().has_code(text="SELECT * FROM b")         # passes
            Ex().has_code(text="SELECT \\\\* FROM b")     # fails
            Ex().has_code(text="SELECT * FROM b", fixed=True)  # fails

        You can check only the code corresponding to the WHERE clause, using::

            where = Ex().check_node('SelectStmt', 0).check_edge('where_clause')
            where.has_code(text="id < 10")
    """
    stu_ast = state.student_ast
    stu_code = state.student_code
    # fallback on using complete student code if no ast
    ParseError = state.ast_dispatcher.ParseError

    def get_text(ast, code):
        # Unparseable code (or nodes without get_text) fall back to the
        # full source string.
        if isinstance(ast, ParseError):
            return code
        try:
            return ast.get_text(code)
        except:
            return code

    stu_text = get_text(stu_ast, stu_code)
    _msg = incorrect_msg.format(
        ast_path=state.get_ast_path() or "highlighted code", text=text)
    # either simple text matching or regex test
    res = text in stu_text if fixed else re.search(text, stu_text)
    if not res:
        state.report(Feedback(_msg))
    return state
|
def creation_dates(self, sort=True):
    """Return a list of (file_path, creation_date) tuples created from list of
    walked paths.

    :param sort: Bool, sorts file_paths on created_date from newest to oldest.
    :return: List of (file_path, created_date) tuples.
    """
    dated = pool_creation_date(self.filepaths)
    if sort:
        # Newest first, keyed on the creation-date element of each tuple.
        dated.sort(key=itemgetter(1), reverse=True)
    return dated
|
def sort_string_by_pairs(strings):
    """Group a list of strings by pairs, by matching those with only
    one character difference between each other together."""
    assert len(strings) % 2 == 0
    remaining = list(strings)  # shallow copy so the input is not consumed
    pairs = []
    while remaining:
        anchor = remaining.pop()
        # Find the first leftover string exactly one edit from the anchor.
        for position, other in enumerate(remaining):
            if count_string_diff(anchor, other) == 1:
                pairs.append(sorted([anchor, remaining.pop(position)]))
                break
    return pairs
|
def register_states_of_state_machine(self, state_machine):
    """This functions registers all states of state machine.

    :param state_machine: the state machine to register all states of
    :return:
    """
    root = state_machine.root_state
    # Observe execution-status changes on the root state, then recurse
    # into all of its child states.
    root.add_observer(self, "state_execution_status",
                      notify_after_function=self.on_state_execution_status_changed_after)
    self.recursively_register_child_states(root)
|
def add_snmp_host ( self , ** kwargs ) :
"""Add SNMP host to NOS device .
Args :
host _ info ( tuple ( str , str ) ) : Tuple of host IP and port .
community ( str ) : Community string to be added to device .
callback ( function ) : A function executed upon completion of the
method . The only parameter passed to ` callback ` will be the
` ` ElementTree ` ` ` config ` .
Returns :
Return value of ` callback ` .
Raises :
KeyError : if ` host _ info ` or ` community ` is not defined ."""
|
host_info = kwargs . pop ( 'host_info' )
community = kwargs . pop ( 'community' )
callback = kwargs . pop ( 'callback' , self . _callback )
config = ET . Element ( 'config' )
snmp_server = ET . SubElement ( config , 'snmp-server' , xmlns = ( "urn:brocade.com:mgmt:" "brocade-snmp" ) )
host = ET . SubElement ( snmp_server , 'host' )
ip_addr = ET . SubElement ( host , 'ip' )
ip_addr . text = host_info [ 0 ]
com = ET . SubElement ( host , 'community' )
com . text = community
udp_port = ET . SubElement ( host , 'udp-port' )
udp_port . text = host_info [ 1 ]
return callback ( config )
|
def share(self, name, item):
    '''Share an object via the telepath protocol.

    Args:
        name (str): Name of the shared object
        item (object): The object to share over telepath.
    '''
    try:
        # Give telepath-aware objects a chance to react to being shared.
        if isinstance(item, s_telepath.Aware):
            item.onTeleShare(self, name)
        self.shared[name] = item
    except Exception:
        # Sharing is best-effort: log the failure and carry on.
        logger.exception(f'onTeleShare() error for: {name}')
|
def find_loci(self, cluster_size, maxgap, locusview=False, colordict=None):
    '''Find the loci of a given cluster size & maximum gap between cluster members.

    For each organism, collects proteins that have HMM hits, clusters them by
    start coordinate with at most ``maxgap`` basepairs between neighbors, and
    records each sufficiently large cluster as a :class:`Locus` on the
    organism.

    Args
        cluster_size (int): minimum number of genes in the cluster.
        maxgap (int): max basepair gap between genes in the cluster.
    Kwargs
        locusview (bool): whether or not a map is generated for the locus
        colordict (dict): pass a pre-made color scheme for identified proteins
    '''
    # BUG FIX: converted legacy Python 2 syntax (print statements,
    # "except IndexError, e") to Python 3, matching the rest of the file.
    if colordict is not None:
        self.search.protein_arrow_color_dict = colordict
    for organism in self.organisms:
        print('finding loci for', organism.name)
        # reset loci if there is something in there already
        organism.loci = []
        orghits = [
            (organism.accession, protein.accession,
             protein.start_bp, protein.end_bp, protein)
            for protein in organism.proteins
            if len(protein.hmm_hit_list) > 0
        ]
        bp_start_pooled = [hit[2] for hit in orghits]
        try:
            clustered_data = self.cluster_number(bp_start_pooled, maxgap)
            # keep only clusters strictly larger than cluster_size
            # (preserves the original ">" comparison)
            significant_cluster_list = [
                cluster for cluster in clustered_data
                if len(cluster) > cluster_size
            ]
            for cluster in significant_cluster_list:
                proteins_in_locus = []
                cluster.sort()
                # map clustered start coordinates back to their proteins
                for bp_start in cluster:
                    for hit in orghits:
                        if bp_start == hit[2]:
                            proteins_in_locus.append(hit[4])
                organism.loci.append(
                    Locus(proteins_in_locus, organism,
                          self.search.query_names, locusview))
        except IndexError as e:
            print('Index error', str(e), organism.name)
        print('total of', str(len(organism.loci)), 'found for', organism.name)
|
def hyphen_range(string):
    '''Expand a string of numbers separated by commas and hyphens into a
    list of integers.

    For example: '2-3,5-7,23' -> [2, 3, 5, 6, 7, 23]. Ranges are inclusive
    of both endpoints.

    Args:
        string (str): comma-separated integers and hyphenated ranges.
            NOTE(review): negative numbers are not supported — a leading
            '-' is parsed as a range separator.

    Returns:
        list of int: the expanded integers, in input order.

    Raises:
        Exception: if an element contains more than one hyphen.
        ValueError: if an element is not a valid integer.
    '''
    list_numbers = []
    for element in string.split(','):
        bounds = element.split('-')
        if len(bounds) == 1:
            list_numbers.append(int(bounds[0]))
        elif len(bounds) == 2:
            # inclusive range, hence the +1 on the upper bound
            list_numbers.extend(range(int(bounds[0]), int(bounds[1]) + 1))
        else:
            # BUG FIX: the original message called .format(string) with no
            # '{}' placeholder, so the offending input was never shown.
            raise Exception(
                'Something went wrong expanding the range {}'.format(string))
    return list_numbers
|
def sparse_GP_regression_1D(num_samples=400, num_inducing=5, max_iters=100, optimize=True, plot=True, checkgrad=False):
    """Run a 1D example of a sparse GP regression.

    Draws ``num_samples`` noisy observations of sin(x) on [-3, 3), fits a
    sparse GP with an RBF kernel and ``num_inducing`` inducing points, and
    returns the fitted model.
    """
    # noisy samples: y = sin(x) + N(0, 0.05^2)
    inputs = np.random.uniform(-3., 3., (num_samples, 1))
    targets = np.sin(inputs) + np.random.randn(num_samples, 1) * 0.05

    # simple sparse GP model with an RBF kernel
    kernel = GPy.kern.RBF(1)
    model = GPy.models.SparseGPRegression(
        inputs, targets, kernel=kernel, num_inducing=num_inducing)

    if checkgrad:
        model.checkgrad()
    if optimize:
        model.optimize('tnc', max_iters=max_iters)
    if plot:
        model.plot()
    return model
|
def exit_and_fail(self, msg=None, out=None):
    """Terminate the runtime with a nonzero exit code, indicating failure.

    :param msg: A string message to print to stderr or another custom file
        descriptor before exiting. (Optional)
    :param out: The file descriptor to emit `msg` to. (Optional)
    """
    # Delegate to exit() with the canonical failure result code.
    self.exit(msg=msg, out=out, result=PANTS_FAILED_EXIT_CODE)
|
def _import_plugins(self) -> None:
    """Import and register each configured plugin with the plugin manager.

    The pluggy library is used as plugin manager. Plugin module names come
    from ``config['sirbot']['plugins']``; if an import fails and the current
    working directory is not yet on ``sys.path``, it is appended and the
    import is retried once (to allow plugins living in the cwd).
    """
    logger.debug('Importing plugins')
    self._pm = pluggy.PluginManager('sirbot')
    self._pm.add_hookspecs(hookspecs)
    for plugin_name in self.config['sirbot']['plugins']:
        try:
            module = importlib.import_module(plugin_name)
        except ModuleNotFoundError:
            # Only retry when adding the cwd could change the outcome.
            if os.getcwd() in sys.path:
                raise
            sys.path.append(os.getcwd())
            module = importlib.import_module(plugin_name)
        self._pm.register(module)
|
def save(self, output_file=None, convert_to=None, runscript="/bin/bash", force=False):
    '''save will convert a recipe to a specified format (defaults to the
    opposite of the recipe type originally loaded, (e.g., docker -->
    singularity and singularity --> docker) and write to an output file,
    if specified. If not specified, a default filename is estimated from
    the target format.

    Parameters
    ==========
    output_file: the file to save to, not required (estimates default)
    convert_to: can be manually forced (docker or singularity)
    runscript: default runscript (entrypoint) to use
    force: if True, override discovery from Dockerfile
    '''
    converted = self.convert(convert_to, runscript, force)
    if output_file is None:
        # BUG FIX: previously passed convert_to=None here, so an explicitly
        # requested target format was ignored when estimating the default
        # output filename.
        output_file = self._get_conversion_outfile(convert_to=convert_to)
    bot.info('Saving to %s' % output_file)
    write_file(output_file, converted)
|
# NOTE(review): the three lines below appear to be web-page scraping residue
# (a dataset-viewer footer), not source code; commented out so the module
# parses. Confirm and delete.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.