signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def _get_encryption_headers ( key , source = False ) :
"""Builds customer encryption key headers
: type key : bytes
: param key : 32 byte key to build request key and hash .
: type source : bool
: param source : If true , return headers for the " source " blob ; otherwise ,
return headers for the " destination " blob .
: rtype : dict
: returns : dict of HTTP headers being sent in request .""" | if key is None :
return { }
key = _to_bytes ( key )
key_hash = hashlib . sha256 ( key ) . digest ( )
key_hash = base64 . b64encode ( key_hash )
key = base64 . b64encode ( key )
if source :
prefix = "X-Goog-Copy-Source-Encryption-"
else :
prefix = "X-Goog-Encryption-"
return { prefix + "Algorithm" : "AES256" , prefix + "Key" : _bytes_to_unicode ( key ) , prefix + "Key-Sha256" : _bytes_to_unicode ( key_hash ) , } |
def infer_format(filename: str) -> str:
    """Return the extension identifying the format of *filename*."""
    return os.path.splitext(filename)[1]
def _decode_message_set_iter(cls, data):
    """Iteratively decode a MessageSet.

    Reads repeated elements of (offset, message), calling _decode_message
    to decode a single message. Since compressed messages contain further
    MessageSets, these two methods have been decoupled so that they may
    recurse easily.

    :param data: raw MessageSet bytes.
    :raises ConsumerFetchSizeTooSmall: if the buffer underflows before a
        single complete message could be yielded.
    """
    cur = 0
    read_message = False
    while cur < len(data):
        try:
            ((offset,), cur) = relative_unpack('>q', data, cur)
            (msg, cur) = read_int_string(data, cur)
            for (offset, message) in KafkaProtocol._decode_message(msg, offset):
                read_message = True
                yield OffsetAndMessage(offset, message)
        except BufferUnderflowError:
            # NOTE: Not sure this is correct error handling:
            # Is it possible to get a BUE if the message set is somewhere
            # in the middle of the fetch response? If so, we probably have
            # an issue that's not fetch size too small.
            # If _decode_message() raises a ChecksumError, couldn't that
            # also be due to the fetch size being too small?
            if read_message is False:
                # Partial read of a message without having yielded
                # anything: the fetch buffer cannot hold a whole message.
                raise ConsumerFetchSizeTooSmall()
            # Fix: `raise StopIteration()` inside a generator becomes a
            # RuntimeError under PEP 479 (Python 3.7+); a plain `return`
            # terminates the generator with the intended semantics.
            return
def json_event_feed(request, location_id=None, room_id=None):
    '''The Jquery fullcalendar app requires a JSON news feed, so this
    function creates the feed from upcoming PrivateEvent objects.'''
    if not getConstant('calendar__privateCalendarFeedEnabled') or not request.user.is_staff:
        return JsonResponse({})

    this_user = request.user
    start_str = request.GET.get('start', '')
    end_str = request.GET.get('end', '')
    tz_name = request.GET.get('timezone', getattr(settings, 'TIME_ZONE', 'UTC'))

    # Restrict occurrences to the date window requested by fullcalendar.
    time_filters = {}
    if start_str:
        time_filters['startTime__gte'] = ensure_timezone(
            datetime.strptime(start_str, '%Y-%m-%d'))
    if end_str:
        time_filters['endTime__lte'] = ensure_timezone(
            datetime.strptime(end_str, '%Y-%m-%d')) + timedelta(days=1)

    group_ids = list(this_user.groups.all().values_list('id', flat=True))

    # Show an event if it targets one of the user's groups, targets the
    # user directly, or targets no group/user at all (visible to everyone).
    filters = Q(event__privateevent__isnull=False) & (
        Q(event__privateevent__displayToGroup__in=group_ids) |
        Q(event__privateevent__displayToUsers=this_user) |
        (Q(event__privateevent__displayToGroup__isnull=True) &
         Q(event__privateevent__displayToUsers__isnull=True))
    )
    if location_id:
        filters = filters & Q(event__location__id=location_id)
    if room_id:
        filters = filters & Q(event__room_id=room_id)

    occurrences = (EventOccurrence.objects.filter(filters)
                   .filter(**time_filters).order_by('-startTime'))
    feed = [EventFeedItem(x, timeZone=tz_name).__dict__ for x in occurrences]
    return JsonResponse(feed, safe=False)
def float_to_knx2(floatval):
    """Convert a float to a 2-byte KNX float value (DPT 9.x)."""
    if floatval < -671088.64 or floatval > 670760.96:
        raise KNXException("float {} out of valid range".format(floatval))

    # KNX floats carry two decimal places in the mantissa.
    scaled = floatval * 100
    i = 0
    exp = 1
    # Find the smallest exponent whose mantissa fits in range.
    for i in range(0, 15):
        exp = pow(2, i)
        if -2048 <= (scaled / exp) < 2047:
            break

    if scaled < 0:
        sign = 1
        mantissa = int(2048 + (scaled / exp))
    else:
        sign = 0
        mantissa = int(scaled / exp)

    # Pack: 1 sign bit, 4 exponent bits, 11 mantissa bits -> two bytes.
    return [(sign << 7) + (i << 3) + (mantissa >> 8), mantissa & 0xff]
def to_file(epub, file):
    """Export to ``file``, which is a *file* or *file-like object*."""
    directory = tempfile.mkdtemp('-epub')

    # Write out the contents to the filesystem, one OPF per package.
    package_filenames = []
    for package in epub:
        opf_filepath = Package.to_file(package, directory)
        package_filenames.append(os.path.basename(opf_filepath))

    # Create the container.xml
    container_xml_filepath = os.path.join(directory, EPUB_CONTAINER_XML_RELATIVE_PATH)
    template = jinja2.Template(CONTAINER_XML_TEMPLATE,
                               trim_blocks=True, lstrip_blocks=True)
    os.makedirs(os.path.dirname(container_xml_filepath))
    # FIXME PY3
    with open(container_xml_filepath, 'w') as out:
        out.write(template.render(package_filenames=package_filenames))

    # Write the mimetype file.
    with open(os.path.join(directory, 'mimetype'), 'w') as out:
        out.write("application/epub+zip")

    # Pack everything up into the target file object.
    pack_epub(directory, file=file)
def content_children(self):
    """A sequence containing the text-container child elements of this
    ``<a:p>`` element, i.e. (a:r | a:br | a:fld)."""
    text_types = (CT_RegularTextRun, CT_TextLineBreak, CT_TextField)
    return tuple(child for child in self if type(child) in text_types)
def set_resolved_url(self, item=None, subtitles=None):
    '''Takes a url or a listitem to be played. Used in conjunction with a
    playable list item with a path that calls back into your addon.

    :param item: A playable list item or url. Pass None to alert XBMC of
        a failure to resolve the item.

    .. warning:: When using set_resolved_url you should ensure the
        initial playable item (which calls back into your addon)
        doesn't have a trailing slash in the URL. Otherwise it
        won't work reliably with XBMC's PlayMedia().

    :param subtitles: A URL to a remote subtitles file or a local
        filename for a subtitles file to be played along with the item.
    '''
    if self._end_of_directory:
        raise Exception('Current XBMC handle has been removed. Either '
                        'set_resolved_url(), end_of_directory(), or '
                        'finish() has already been called.')
    self._end_of_directory = True

    succeeded = item is not None
    if item is None:
        # None item indicates the resolve url failed.
        item = {}
    if isinstance(item, basestring):
        # Caller is passing a url instead of an item dict.
        item = {'path': item}

    item = self._listitemify(item)
    item.set_played(True)
    xbmcplugin.setResolvedUrl(self.handle, succeeded, item.as_xbmc_listitem())

    # Call to _add_subtitles must be after setResolvedUrl.
    if subtitles:
        self._add_subtitles(subtitles)
    return [item]
def position_p(self):
    """The proportional constant for the position PID."""
    # get_attr_int returns a (cached attribute handle, value) pair;
    # keep the handle for the next read.
    attr, value = self.get_attr_int(self._position_p, 'hold_pid/Kp')
    self._position_p = attr
    return value
def get_file_mime_encoding(parts):
    """Get encoding value from split output of `file --mime --uncompress`."""
    marker = "compressed-encoding="
    for part in parts:
        for token in part.split(" "):
            if token.startswith(marker):
                mime = token.split("=")[1].strip()
                return Mime2Encoding.get(mime)
    return None
def _find_variable(self, pattern, logline):
    """Return the variable parts of the code given a tuple-of-strings pattern.

    Example: (this, is, a, pattern) -> 'this is a good pattern' -> [good]
    """
    var_subs = []

    # Whatever precedes the first pattern element (minus the datetime).
    first_index = logline.index(pattern[0])
    var_subs.append(self._strip_datetime(logline[:first_index]))

    for patt, patt_next in zip(pattern[:-1], pattern[1:]):
        # Regex capturing whatever sits between two adjacent substrings.
        pat = re.escape(patt) + '(.*)' + re.escape(patt_next)
        between = re.search(pat, logline)
        try:
            # Only record the capture when the search matched.
            var_subs.append(between.group(1))
        except Exception:
            pass

    # Everything after the last pattern element, minus counters/durations.
    tail_start = logline.rindex(pattern[-1]) + len(pattern[-1])
    var_subs.append(self._strip_counters(logline[tail_start:]))
    # strip whitespace from each string, but keep the strings themselves
    # var_subs = [v.strip() for v in var_subs]
    return var_subs
def _new_conn(self):
    """Return a fresh :class:`httplib.HTTPConnection`.

    Also increments ``self.num_connections`` for bookkeeping/logging.
    """
    self.num_connections += 1
    # Fix: pass lazy %-style args to the logger instead of eagerly
    # interpolating with ``%``, so formatting only happens when the
    # INFO record is actually emitted.
    log.info("Starting new HTTP connection (%d): %s",
             self.num_connections, self.host)
    return HTTPConnection(host=self.host, port=self.port)
def binary_shader_for_rules(self, output_jar, jar, rules, jvm_options=None):
    """Yields an `Executor.Runner` that will perform shading of the binary `jar` when `run()`.

    No default rules are applied; only the rules passed in as a parameter will be used.

    :param unicode output_jar: The path to dump the shaded jar to; will be over-written if it
      exists.
    :param unicode jar: The path to the jar file to shade.
    :param list rules: The rules to apply for shading.
    :param list jvm_options: an optional sequence of options for the underlying jvm
    :returns: An `Executor.Runner` that can be `run()` to shade the given `jar`.
    :rtype: :class:`pants.java.executor.Executor.Runner`
    """
    with self.temporary_rules_file(rules) as rules_file:
        rendered = ' '.join(rule.render() for rule in rules)
        logger.debug('Running jarjar with rules:\n{}'.format(rendered))
        yield self._executor.runner(
            classpath=self._jarjar_classpath,
            main='org.pantsbuild.jarjar.Main',
            jvm_options=jvm_options,
            args=['process', rules_file, jar, output_jar],
        )
def _handle_tag_definefontalignzones(self):
    """Handle the DefineFontAlignZones tag."""
    obj = _make_object("DefineFontAlignZones")
    obj.FontId = unpack_ui16(self._src)

    bits = BitConsumer(self._src)
    obj.CSMTableHint = bits.u_get(2)
    obj.Reserved = bits.u_get(6)

    obj.ZoneTable = zone_records = []
    # One zone record per glyph of the most recently defined font;
    # the counter is consumed (reset to None) by this tag.
    glyph_count = self._last_defined_glyphs_quantity
    self._last_defined_glyphs_quantity = None

    for _ in range(glyph_count):
        record = _make_object("ZoneRecord")
        zone_records.append(record)
        record.NumZoneData = unpack_ui8(self._src)
        record.ZoneData = zone_data = []
        for _ in range(record.NumZoneData):
            datum = _make_object("ZoneData")
            zone_data.append(datum)
            datum.AlignmentCoordinate = unpack_float16(self._src)
            datum.Range = unpack_float16(self._src)
        bits = BitConsumer(self._src)
        record.Reserved = bits.u_get(6)
        record.ZoneMaskY = bits.u_get(1)
        record.ZoneMaskX = bits.u_get(1)
    return obj
def get_paths(folder, ignore_endswith=ignore_endswith):
    '''Return hologram file paths

    Parameters
    ----------
    folder: str or pathlib.Path
        Path to search folder
    ignore_endswith: list
        List of filename ending strings indicating which
        files should be ignored.
    '''
    root = pathlib.Path(folder).resolve()
    # str.endswith accepts a tuple, so one pass filters all endings.
    suffixes = tuple(ignore_endswith)
    kept = [path for path in root.rglob("*")
            if not path.name.endswith(suffixes)]
    return sorted(kept)
def execute(self, command):
    """Start a new MIP run."""
    # Restore default SIGPIPE handling in the child process so that a
    # downstream pipe consumer closing early terminates it cleanly.
    def _restore_sigpipe():
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    return subprocess.Popen(command, preexec_fn=_restore_sigpipe)
def get_unique_fields(fld_lists):
    """Get unique namedtuple fields, despite potential duplicates in lists of fields."""
    flds = []
    seen = set()
    # Keep first-appearance order while dropping repeats.
    for fld_list in fld_lists:
        for fld in fld_list:
            if fld not in seen:
                seen.add(fld)
                flds.append(fld)
    # Sanity check: output covers every distinct field exactly once.
    assert len(flds) == len({f for flst in fld_lists for f in flst})
    return flds
def wrap(vtkdataset):
    """This is a convenience method to safely wrap any given VTK data object
    to its appropriate ``vtki`` data object.

    If the VTK class name is not recognized, the original object is
    returned unchanged (with a warning) rather than raising.
    """
    wrappers = {
        'vtkUnstructuredGrid': vtki.UnstructuredGrid,
        'vtkRectilinearGrid': vtki.RectilinearGrid,
        'vtkStructuredGrid': vtki.StructuredGrid,
        'vtkPolyData': vtki.PolyData,
        'vtkImageData': vtki.UniformGrid,
        'vtkStructuredPoints': vtki.UniformGrid,
        'vtkMultiBlockDataSet': vtki.MultiBlock,
    }
    key = vtkdataset.GetClassName()
    try:
        wrapped = wrappers[key](vtkdataset)
    except KeyError:
        # Fix: the original bare ``except:`` also swallowed genuine errors
        # raised by the wrapper constructors (and KeyboardInterrupt);
        # only an unknown class name should fall through.
        logging.warning('VTK data type ({}) is not currently supported by vtki.'.format(key))
        # If not supported, just pass through the VTK data object.
        return vtkdataset
    return wrapped
def RegisterCredentials(cls, credentials):
    """Registers a path specification credentials.

    Args:
      credentials (Credentials): credentials.

    Raises:
      KeyError: if credentials object is already set for the corresponding
          type indicator.
    """
    indicator = credentials.type_indicator
    if indicator in cls._credentials:
        raise KeyError(
            'Credentials object already set for type indicator: {0:s}.'.format(
                indicator))
    cls._credentials[indicator] = credentials
def nick(self, nick):
    """Sets your nick.

    Required arguments:
    * nick - New nick or a tuple of possible new nicks.
    """
    succeeded = False
    try:
        self._nick(nick)
        succeeded = True
    except TypeError:
        # A tuple/iterable of candidates was given; try each in order.
        for candidate in nick:
            try:
                self._nick(candidate)
                succeeded = True
                break
            except self.NicknameInUse:
                pass
    if not succeeded:
        self.exception('433')
def qteRegisterApplet(self, cls, replaceApplet: bool = False):
    """Register ``cls`` as an applet.

    The name of the applet is the class name of ``cls`` itself. For
    instance, if the applet was defined and registered as

        class NewApplet17(QtmacsApplet):
            ...
        app_name = qteRegisterApplet(NewApplet17)

    then the applet will be known as *NewApplet17*, which is also
    returned in ``app_name``.

    If an applet with this name already exists then ``replaceApplet``
    decides whether the registration will overwrite the existing
    definition or ignore the registration request altogether. In the
    first case, none of the already instantiated applets will be
    affected, only newly created ones will use the new definition.

    .. note:: this method expects a *class*, not an instance.

    |Args|

    * ``cls`` (**class QtmacsApplet**): this must really be a class,
      not an instance.
    * ``replaceApplet`` (**bool**): if applet with same name exists,
      then replace it.

    |Returns|

    * **str**: name under which the applet was registered with Qtmacs.

    |Raises|

    * **QtmacsArgumentError** if at least one argument has an invalid type.
    """
    # Check type of input arguments.
    if not issubclass(cls, QtmacsApplet):
        raise QtmacsArgumentError('cls', 'class QtmacsApplet',
                                  inspect.stack()[0][3])

    # The applet is known to Qtmacs by its class name.
    class_name = cls.__name__

    # Warn if an applet with this name was registered before; bail out
    # unless the caller asked for the definition to be replaced.
    if class_name in self._qteRegistryApplets:
        if replaceApplet:
            self.qteLogger.warning(
                'The original applet <b>{}</b> was redefined.'.format(class_name))
        else:
            self.qteLogger.warning(
                'The original applet <b>{}</b> was not redefined.'.format(class_name))
            return class_name

    # Execute the classmethod __qteRegisterAppletInit__ to allow the
    # applet to make global initialisations that do not depend on a
    # particular instance, eg. the supported file types.
    cls.__qteRegisterAppletInit__()

    # Add the class (not instance!) to the applet registry.
    self._qteRegistryApplets[class_name] = cls
    self.qteLogger.info('Applet <b>{}</b> now registered.'.format(class_name))
    return class_name
def getWindowPID(self, hwnd):
    """Gets the process ID that the specified window belongs to.

    Returns None when no window with that number is found.
    """
    for window in self._get_window_list():
        if "kCGWindowNumber" in window and window["kCGWindowNumber"] == hwnd:
            return window["kCGWindowOwnerPID"]
def set(self, val):
    """Set the value.

    Performs a conditional put: when a previous value exists, the write
    only succeeds if the stored 'current_value' still matches it, and a
    SimpleDB 409 response is surfaced as ``ValueError`` ("out of sync").
    """
    import time
    now = time.time()
    expected_value = []
    new_val = {'timestamp': now}
    # Fix: ``!= None`` replaced by the identity check ``is not None``.
    if self._value is not None:
        new_val['last_value'] = self._value
        expected_value = ['current_value', str(self._value)]
    new_val['current_value'] = val
    try:
        self.db.put_attributes(self.id, new_val, expected_value=expected_value)
        self.timestamp = new_val['timestamp']
    # Fix: ``except X, e`` and ``raise ValueError, msg`` are Python-2-only
    # syntax (SyntaxError on Python 3); use the cross-version forms.
    except SDBResponseError as e:
        if e.status == 409:
            # 409 Conflict: someone else advanced the sequence first.
            raise ValueError("Sequence out of sync")
        else:
            raise
def v_type_base(ctx, stmt, no_error_report=False):
    """Verify that the referenced identity exists."""
    name = stmt.arg
    stmt.i_identity = None

    # Split an optional module prefix off the referenced name.
    if ":" in name:
        [prefix, name] = name.split(':', 1)
    else:
        prefix = None

    if prefix is None or stmt.i_module.i_prefix == prefix:
        # Check local identities.
        pmodule = stmt.i_module
    else:
        # This is a prefixed name; check the imported modules.
        pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos, ctx.errors)
        if pmodule is None:
            return

    if name in pmodule.i_identities:
        i = pmodule.i_identities[name]
        if prefix is None and not is_submodule_included(stmt, i):
            pass
        else:
            stmt.i_identity = i
            v_type_identity(ctx, stmt.i_identity)

    if stmt.i_identity is None and not no_error_report:
        err_add(ctx.errors, stmt.pos, 'IDENTITY_NOT_FOUND', (name, pmodule.arg))
def get_float(self, key: str) -> Optional[float]:
    """Returns an optional configuration value, as a float, by its key, or None if it doesn't exist.

    If the configuration value isn't a legal float, this function will throw an error.

    :param str key: The requested configuration key.
    :return: The configuration key's value, or None if one does not exist.
    :rtype: Optional[float]
    :raises ConfigTypeError: The configuration value existed but couldn't be coerced to float.
    """
    v = self.get(key)
    if v is None:
        return None
    try:
        return float(v)
    except (TypeError, ValueError):
        # Fix: narrow the bare ``except:`` to the exceptions float()
        # actually raises, so KeyboardInterrupt/SystemExit and unrelated
        # bugs are not masked as ConfigTypeError.
        raise ConfigTypeError(self.full_key(key), v, 'float')
def _set_tag(self, tag=None, tags=None, value=True):
    """Sets the value of a specific tag or merges existing tags with a dict of new tags.

    Either tag or tags must be None.

    :param tag: Tag which needs to be set.
    :param tags: Set of tags which needs to be merged with existing tags.
    :param value: Value to set for the tag named by :param tag.
    :return: Nothing
    """
    current = self._requirements.get("tags")
    if tags and not tag:
        # Merge the whole dict of new tags into the existing ones.
        self._requirements["tags"] = merge(current, tags)
    elif tag and not tags:
        # Set (or overwrite) a single tag.
        current[tag] = value
        self._requirements["tags"] = current
def verify(self, **kwargs):
    """Implementations MUST either return both a Client Configuration Endpoint
    and a Registration Access Token or neither of them.

    :param kwargs:
    :return: True if the message is OK otherwise False
    """
    super(RegistrationResponse, self).verify(**kwargs)
    has_uri = "registration_client_uri" in self
    has_token = "registration_access_token" in self
    # The two fields must appear together or not at all.
    if has_uri != has_token:
        raise VerificationError(
            ("Only one of registration_client_uri"
             " and registration_access_token present"), self)
    return True
def duplicates(table, key=None, presorted=False, buffersize=None,
               tempdir=None, cache=True):
    """Select rows with duplicate values under a given key (or duplicate
    rows where no key is given). E.g.::

        >>> import petl as etl
        >>> table1 = [['foo', 'bar', 'baz'],
        ...           ['A', 1, 2.0],
        ...           ['B', 2, 3.4],
        ...           ['D', 6, 9.3],
        ...           ['B', 3, 7.8],
        ...           ['B', 2, 12.3],
        ...           ['E', None, 1.3],
        ...           ['D', 4, 14.5]]
        >>> table2 = etl.duplicates(table1, 'foo')
        >>> table2
        | foo | bar | baz  |
        | 'B' |   2 |  3.4 |
        | 'B' |   3 |  7.8 |
        | 'B' |   2 | 12.3 |
        | 'D' |   6 |  9.3 |
        | 'D' |   4 | 14.5 |
        >>> # compound keys are supported
        ... table3 = etl.duplicates(table1, key=['foo', 'bar'])
        >>> table3
        | foo | bar | baz  |
        | 'B' |   2 |  3.4 |
        | 'B' |   2 | 12.3 |

    If `presorted` is True, it is assumed that the data are already sorted by
    the given key, and the `buffersize`, `tempdir` and `cache` arguments are
    ignored. Otherwise, the data are sorted, see also the discussion of the
    `buffersize`, `tempdir` and `cache` arguments under the
    :func:`petl.transform.sorts.sort` function.

    See also :func:`petl.transform.dedup.unique` and
    :func:`petl.transform.dedup.distinct`.
    """
    return DuplicatesView(table, key=key, presorted=presorted,
                          buffersize=buffersize, tempdir=tempdir, cache=cache)
def update_flapping(self, notif_period, hosts, services):
    """Compute the sample list (self.flapping_changes) and determine
    whether the host/service is flapping or not

    :param notif_period: notification period object for this host/service
    :type notif_period: alignak.object.timeperiod.Timeperiod
    :param hosts: Hosts objects, used to create notification if necessary
    :type hosts: alignak.objects.host.Hosts
    :param services: Services objects, used to create notification if necessary
    :type services: alignak.objects.service.Services
    :return: None
    :rtype: NoneType
    """
    flap_history = self.__class__.flap_history

    # Weighted percentage of state change: later samples weigh more
    # (linear ramp from 0.8 up to 1.2 over the history window).
    res = 0.0
    i = 0
    for has_changed in self.flapping_changes:
        i += 1
        if has_changed:
            res += i * (1.2 - 0.8) / flap_history + 0.8
    res = res / flap_history
    res *= 100

    # We can update our value
    self.percent_state_change = res

    # The value is only accurate once the sample window is full.
    is_full = len(self.flapping_changes) >= flap_history

    # Thresholds can come from the object itself, or from the class.
    (low_flap_threshold, high_flap_threshold) = (self.low_flap_threshold,
                                                 self.high_flap_threshold)
    # TODO: no more useful because a default value is defined, but is it really correct?
    if low_flap_threshold == -1:  # pragma: no cover, never used
        cls = self.__class__
        low_flap_threshold = cls.global_low_flap_threshold
    if high_flap_threshold == -1:  # pragma: no cover, never used
        cls = self.__class__
        high_flap_threshold = cls.global_high_flap_threshold

    # Transition OUT of the flapping state.
    if self.is_flapping and res < low_flap_threshold and is_full:
        self.is_flapping = False
        # We also raise a log entry
        self.raise_flapping_stop_log_entry(res, low_flap_threshold)
        # and a notification
        self.remove_in_progress_notifications(master=True)
        self.create_notifications('FLAPPINGSTOP', notif_period, hosts, services)
        # And update our status for modules
        self.broks.append(self.get_update_status_brok())

    # Transition INTO the flapping state.
    if not self.is_flapping and res >= high_flap_threshold and is_full:
        self.is_flapping = True
        # We also raise a log entry
        self.raise_flapping_start_log_entry(res, high_flap_threshold)
        # and a notification
        self.remove_in_progress_notifications(master=True)
        self.create_notifications('FLAPPINGSTART', notif_period, hosts, services)
        # And update our status for modules
        self.broks.append(self.get_update_status_brok())
def quote_name(self, name):
    """Returns a quoted version of the given table, index or column name. Does
    not quote the given name if it's already been quoted."""
    left, right = self.left_sql_quote, self.right_sql_quote
    if name.startswith(left) and name.endswith(right):
        # Quoting once is enough.
        return name
    return '%s%s%s' % (left, name, right)
def record_close(object_id, input_params=None, always_retry=True, **kwargs):
    """Invokes the /record-xxxx/close API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Data-Object-Lifecycle#API-method%3A-%2Fclass-xxxx%2Fclose

    :param object_id: the record object ID ("record-xxxx").
    :param input_params: optional dict of input parameters for the call.
    :param always_retry: whether the request may always be retried.
    """
    # Fix: avoid a mutable default argument (``{}``), which is shared
    # across calls and could leak state if the request layer mutated it.
    if input_params is None:
        input_params = {}
    return DXHTTPRequest('/%s/close' % object_id, input_params,
                         always_retry=always_retry, **kwargs)
def doesNotMatch(self, value, caseSensitive=True):
    """Sets the operator type to Query.Op.DoesNotMatch and sets the value to the inputted value.

    :param value: <variant>
    :return: self (useful for chaining)

    :usage
        >>> from orb import Query as Q
        >>> query = Q('comments').doesNotMatch('test')
        >>> print query
        comments does_not_contain test
    """
    # Work on a copy so the original query is left untouched.
    copied = self.copy()
    copied.setOp(Query.Op.DoesNotMatch)
    copied.setValue(value)
    copied.setCaseSensitive(caseSensitive)
    return copied
def get(project, credentials=None):
    """Main Get method: Get project state, parameters, outputindex"""
    user, oauth_access_token = parsecredentials(credentials)
    if not Project.exists(project, user):
        # 404
        return withheaders(
            flask.make_response(
                "Project " + project + " was not found for user " + user, 404),
            headers={'allow_origin': settings.ALLOW_ORIGIN})

    # if user and not Project.access(project, user) and not user in settings.ADMINS:
    #     return flask.make_response("Access denied to project " + project + " for user " + user, 401)  # 401
    datafile = os.path.join(Project.path(project, credentials), 'clam.xml')
    statuscode, statusmsg, statuslog, completion = Project.status(project, user)  # pylint: disable=unused-variable
    if statuscode == clam.common.status.DONE and os.path.exists(datafile):
        # 200: the project is done, parse and return the output data
        with io.open(datafile, 'r', encoding='utf-8') as f:
            xmldata = f.read(os.path.getsize(datafile))
        data = clam.common.data.CLAMData(
            xmldata, None, False, Project.path(project, credentials),
            loadmetadata=False)
        matched = (','.join([str(x) for x in data.program.matchedprofiles])
                   if data.program else "")
        return Project.response(user, project, settings.PARAMETERS, "", False,
                                oauth_access_token, matched, data.program)
    else:
        # HTTP request parameters may be used to pre-set global parameters
        # when starting a project (issue #66)
        for parametergroup, parameterlist in settings.PARAMETERS:  # pylint: disable=unused-variable
            for parameter in parameterlist:
                value = parameter.valuefrompostdata(flask.request.values)
                if value is not None:
                    parameter.set(value)
        return Project.response(user, project, settings.PARAMETERS, "", False,
                                oauth_access_token)
def python_portable_string(string, encoding='utf-8'):
    """Converts bytes into a string type.

    Valid string types are returned without modification. So in Python 2,
    type str and unicode are not converted.

    In Python 3, type bytes is converted to type str (unicode).
    """
    if isinstance(string, six.string_types):
        # Already a native/unicode string: nothing to do.
        return string
    if not six.PY3:
        raise ValueError('Unsupported type %s' % str(type(string)))
    return string.decode(encoding)
def attribute_node(self, name, ns_uri=None):
    """:param string name: the name of the attribute to return.
    :param ns_uri: a URI defining a namespace constraint on the attribute.
    :type ns_uri: string or None

    :return: this element's attribute that matches ``ns_uri`` as an
        :class:`Attribute` node.
    """
    adapter = self.adapter
    impl_attr = adapter.get_node_attribute_node(self.impl_node, name, ns_uri)
    return adapter.wrap_node(impl_attr, adapter.impl_document, adapter)
def quantile(arg, quantile, interpolation='linear'):
    """Return value at the given quantile, a la numpy.percentile.

    Parameters
    ----------
    quantile : float/int or array-like
        0 <= quantile <= 1, the quantile(s) to compute
    interpolation : {'linear', 'lower', 'higher', 'midpoint', 'nearest'}
        This optional parameter specifies the interpolation method to use,
        when the desired quantile lies between two data points `i` and `j`:

        * linear: `i + (j - i) * fraction`, where `fraction` is the
          fractional part of the index surrounded by `i` and `j`.
        * lower: `i`.
        * higher: `j`.
        * nearest: `i` or `j` whichever is nearest.
        * midpoint: (`i` + `j`) / 2.

    Returns
    -------
    quantile
        if scalar input, scalar type, same as input
        if array input, list of scalar type
    """
    # A sequence of quantiles maps to the multi-quantile op.
    if isinstance(quantile, collections.abc.Sequence):
        op_cls = ops.MultiQuantile
    else:
        op_cls = ops.Quantile
    return op_cls(arg, quantile, interpolation).to_expr()
def call(payload=None):
    '''This function captures the query string and sends it to the Palo Alto device.'''
    # Fix: guard against the default ``payload=None`` — several branches
    # below call ``payload.update(...)`` and the epilogue reads keys from
    # it, which would raise AttributeError/TypeError on None.
    if payload is None:
        payload = {}
    r = None
    try:
        if DETAILS['method'] == 'dev_key':
            # Pass the api key without the target declaration
            payload.update({'key': DETAILS['apikey']})
            r = __utils__['http.query'](DETAILS['url'], data=payload, method='POST',
                                        decode_type='plain', decode=True,
                                        verify_ssl=False, status=True, raise_error=True)
        elif DETAILS['method'] == 'dev_pass':
            # Pass credentials without the target declaration
            r = __utils__['http.query'](DETAILS['url'], username=DETAILS['username'],
                                        password=DETAILS['password'], data=payload,
                                        method='POST', decode_type='plain', decode=True,
                                        verify_ssl=False, status=True, raise_error=True)
        elif DETAILS['method'] == 'pan_key':
            # Pass the api key with the target declaration
            payload.update({'key': DETAILS['apikey'], 'target': DETAILS['serial']})
            r = __utils__['http.query'](DETAILS['url'], data=payload, method='POST',
                                        decode_type='plain', decode=True,
                                        verify_ssl=False, status=True, raise_error=True)
        elif DETAILS['method'] == 'pan_pass':
            # Pass credentials with the target declaration
            payload.update({'target': DETAILS['serial']})
            r = __utils__['http.query'](DETAILS['url'], username=DETAILS['username'],
                                        password=DETAILS['password'], data=payload,
                                        method='POST', decode_type='plain', decode=True,
                                        verify_ssl=False, status=True, raise_error=True)
    except KeyError:
        raise salt.exceptions.CommandExecutionError("Did not receive a valid response from host.")
    if not r:
        raise salt.exceptions.CommandExecutionError("Did not receive a valid response from host.")
    status = six.text_type(r['status'])
    if status not in ['200', '201', '204']:
        if status == '400':
            raise salt.exceptions.CommandExecutionError(
                "The server cannot process the request due to a client error.")
        elif status == '401':
            raise salt.exceptions.CommandExecutionError(
                "The server cannot process the request because it lacks valid authentication "
                "credentials for the target resource.")
        elif status == '403':
            raise salt.exceptions.CommandExecutionError(
                "The server refused to authorize the request.")
        elif status == '404':
            raise salt.exceptions.CommandExecutionError(
                "The requested resource could not be found.")
        else:
            raise salt.exceptions.CommandExecutionError(
                "Did not receive a valid response from host.")
    xmldata = ET.fromstring(r['text'])
    # If we are pulling the candidate configuration, we need to strip the dirtyId
    # (use .get() so payloads without these keys don't raise).
    if payload.get('type') == 'config' and payload.get('action') == 'get':
        xmldata = _strip_dirty(xmldata)
    return xml.to_dict(xmldata, True)
def _log_failed_submission ( self , data ) :
"""Log a reasonable representation of an event that should have been sent
to Sentry""" | message = data . pop ( 'message' , '<no message value>' )
output = [ message ]
if 'exception' in data and 'stacktrace' in data [ 'exception' ] [ 'values' ] [ - 1 ] : # try to reconstruct a reasonable version of the exception
for frame in data [ 'exception' ] [ 'values' ] [ - 1 ] [ 'stacktrace' ] . get ( 'frames' , [ ] ) :
output . append ( ' File "%(fn)s", line %(lineno)s, in %(func)s' % { 'fn' : frame . get ( 'filename' , 'unknown_filename' ) , 'lineno' : frame . get ( 'lineno' , - 1 ) , 'func' : frame . get ( 'function' , 'unknown_function' ) , } )
self . uncaught_logger . error ( output ) |
def parse_type(field):
    """Build the Java expression that extracts this field's value from the
    binary payload via an SBPMessage.Parser instance."""
    type_id = field.type_id
    if type_id == 'string':
        # Strings are read whole, or with a fixed byte count when sized.
        if 'size' in field.options:
            return "parser.getString(%d)" % field.options['size'].value
        return "parser.getString()"
    if type_id in JAVA_TYPE_MAP:
        # Primitive java types have extractor methods in SBPMessage.Parser
        return "parser.get%s()" % type_id.capitalize()
    if type_id == 'array':
        # Arrays of primitives use typed extractors; everything else goes
        # through the generic class-based getArray().
        fill = field.options['fill'].value
        if fill in JAVA_TYPE_MAP:
            if 'size' in field.options:
                return "parser.getArrayof%s(%d)" % (fill.capitalize(), field.options['size'].value)
            return "parser.getArrayof%s()" % fill.capitalize()
        if 'size' in field.options:
            return "parser.getArray(%s.class, %d)" % (fill, field.options['size'].value)
        return "parser.getArray(%s.class)" % fill
    # Inner class: delegate to its own parse() via the default constructor.
    return "new %s().parse(parser)" % type_id
def check_scope(self, token, request):
    """Return the scopes from this view's requirements that *token* allows.

    The requirement is read from the attribute named after the HTTP method
    (e.g. ``self.GET``).  ``None`` means always allowed (returns ``True``);
    a string is checked as a single requirement; any other iterable is
    filtered down to the scopes the token permits.

    :param token: access token exposing ``allow_scopes(list)``
    :param request: request object exposing ``method``
    :raises OAuthError: if the view has no attribute for the HTTP method
    :raises Exception: if the required-scopes value is malformed
    """
    http_method = request.method
    if not hasattr(self, http_method):
        raise OAuthError("HTTP method is not recognized")
    required_scopes = getattr(self, http_method)
    # a None scope means always allowed
    if required_scopes is None:
        return True
    # The required scope is either a string or an iterable.  If string,
    # check if it is allowed for our access token; otherwise iterate
    # through required_scopes to see which scopes are allowed.
    if isinstance(required_scopes, six.string_types):
        if token.allow_scopes(required_scopes.split()):
            return [required_scopes]
        return []
    allowed_scopes = []
    try:
        for scope in required_scopes:
            if token.allow_scopes(scope.split()):
                allowed_scopes.append(scope)
    except (TypeError, AttributeError):
        # Narrowed from a bare ``except:``: only a non-iterable container or
        # non-string members are treated as invalid; KeyboardInterrupt and
        # SystemExit now propagate instead of being swallowed.
        raise Exception('Invalid required scope values')
    return allowed_scopes
def Binomial(n, p, tag=None):
    """A Binomial random variate

    Parameters
    ----------
    n : int
        The number of trials (positive integer).
    p : scalar
        The probability of success, strictly between zero and one.
    tag : optional
        Label forwarded to the underlying ``uv`` wrapper.

    Raises
    ------
    ValueError
        If ``n`` is not a positive integer or ``p`` is outside (0, 1).
    """
    # Raise explicitly instead of using ``assert`` so that validation is not
    # stripped when Python runs with -O.
    if int(n) != n or n <= 0:
        raise ValueError('Binomial number of trials "n" must be an integer greater than zero')
    if not 0 < p < 1:
        raise ValueError('Binomial probability "p" must be between zero and one, non-inclusive')
    return uv(ss.binom(n, p), tag=tag)
def load_map_coordinates(map_file):
    """Loads map coordinates from netCDF or pickle file created by util.makeMapGrids.

    Args:
        map_file: Filename for the file containing coordinate information.
            ``.pkl`` files are unpickled; anything else is opened as netCDF.
    Returns:
        Longitude and latitude grids as numpy arrays, as ``(lon, lat)``.
    """
    if map_file.endswith(".pkl"):
        # Open in binary mode (required by pickle on Python 3) and use a
        # context manager so the handle is always closed.
        with open(map_file, "rb") as pkl_file:
            map_data = pickle.load(pkl_file)
        lon = map_data['lon']
        lat = map_data['lat']
    else:
        map_data = Dataset(map_file)
        if "lon" in map_data.variables.keys():
            lon = map_data.variables['lon'][:]
            lat = map_data.variables['lat'][:]
        else:
            # WRF-style files store 2D coordinate arrays with a leading
            # time axis; take the first slice.
            lon = map_data.variables["XLONG"][0]
            lat = map_data.variables["XLAT"][0]
    return lon, lat
def input_stages(self):
    """List(int): Entry IDs for stages that were inputs for this stage."""
    stages = self._properties.get("inputStages")
    if stages is None:
        return []
    return [_helpers._int_or_none(stage_id) for stage_id in stages]
def _chunk_with_padding ( self , iterable , n , fillvalue = None ) :
"Collect data into fixed - length chunks or blocks" | # _ chunk _ with _ padding ( ' ABCDEFG ' , 3 , ' x ' ) - - > ABC DEF Gxx "
args = [ iter ( iterable ) ] * n
return zip_longest ( * args , fillvalue = fillvalue ) |
def from_dict(cls, parm, vrf=None):
    """Create new VRF-object from dict.

    Suitable for creating objects from XML-RPC data.
    All available keys must exist.
    """
    if vrf is None:
        vrf = VRF()
    vrf.id = parm['id']
    vrf.rt = parm['rt']
    vrf.name = parm['name']
    vrf.description = parm['description']
    # Rebuild Tag objects keyed by their name.
    vrf.tags = {tag_name: Tag.from_dict({'name': tag_name})
                for tag_name in parm['tags']}
    vrf.avps = parm['avps']
    # Statistics arrive as strings over XML-RPC; coerce each to int.
    for stat in ('num_prefixes_v4', 'num_prefixes_v6',
                 'total_addresses_v4', 'total_addresses_v6',
                 'used_addresses_v4', 'used_addresses_v6',
                 'free_addresses_v4', 'free_addresses_v6'):
        setattr(vrf, stat, int(parm[stat]))
    return vrf
def show_usage(docstring, short, stream, exitcode):
    """Print program usage information and exit.

    :arg str docstring: the program help text

    This function just prints *docstring* and exits. In most cases, the
    function :func:`check_usage` should be used: it automatically checks
    :data:`sys.argv` for a sole "-h" or "--help" argument and invokes this
    function.

    This function is provided in case there are instances where the user
    should get a friendly usage message that :func:`check_usage` doesn't catch.
    It can be contrasted with :func:`wrong_usage`, which prints a terser usage
    message and exits with an error code.
    """
    if stream is None:
        from sys import stdout as stream
    if short:
        # Emit only the first paragraph of the docstring, then point the
        # user at --help for the full text.
        in_paragraph = False
        for line in docstring.splitlines():
            if not in_paragraph:
                if line:
                    in_paragraph = True
                    print('Usage:', line, file=stream)
            elif line:
                print(line, file=stream)
            else:
                break
        print('\nRun with a sole argument --help for more detailed '
              'usage information.', file=stream)
    else:
        print('Usage:', docstring.strip(), file=stream)
    raise SystemExit(exitcode)
def equals(self, rhs):
    """Check to see if the RHS is an instance of class_name.

    Args:
      rhs: object, the right hand side of the test

    Returns:
      bool
    """
    check_against = self._class_name
    try:
        return isinstance(rhs, check_against)
    except TypeError:
        # Fall back to comparing raw types when isinstance() rejects the
        # comparison object; helpful for things like cStringIO.StringIO.
        return type(rhs) == type(check_against)
def mac(address='', interface='', vlan=0, **kwargs):  # pylint: disable=unused-argument
    '''
    Returns the MAC Address Table on the device.

    :param address: MAC address to filter on
    :param interface: Interface name to filter on
    :param vlan: VLAN identifier
    :return: A list of dictionaries representing the entries in the MAC
        Address Table (keys include ``mac``, ``interface``, ``static``,
        ``active``, ``moves``, ``vlan`` and ``last_move``).

    CLI Example:

    .. code-block:: bash

        salt '*' net.mac
        salt '*' net.mac vlan=10
    '''
    result = salt.utils.napalm.call(
        napalm_device,  # pylint: disable=undefined-variable
        'get_mac_address_table',
        **{}
    )
    if not result.get('result'):
        # Proxy call failed: hand the error payload back untouched.
        return result
    table = result.get('out')
    # Apply each requested filter in turn (vlan=0 means "no vlan filter").
    if vlan and isinstance(vlan, int):
        table = _filter_list(table, 'vlan', vlan)
    if address:
        table = _filter_list(table, 'mac', address)
    if interface:
        table = _filter_list(table, 'interface', interface)
    result.update({'out': table})
    return result
def _load_first(self, target_uris, load_method, **kwargs):
    """Load first yamldict target found in uri list.

    :param target_uris: Uris to try and open
    :param load_method: load callback
    :type target_uris: list or string
    :type load_method: callback
    :returns: yamldict
    :raises IOError: when none of the uris can be loaded
    """
    # Accept a bare string as a one-element list of uris.
    if isinstance(target_uris, string_types):
        target_uris = [target_uris]
    # TODO: Move the list logic into the extension, otherwise a
    # load will always try all missing files first.
    # TODO: How would multiple protocols work, should the registry hold
    # persist copies?
    for uri in target_uris:
        parts = urlsplit(uri, scheme=self.default_protocol)
        extension = self.get_extension(parts.scheme)
        query = extension.conform_query(parts.query)
        try:
            return extension.load_target(
                parts.scheme, parts.path, parts.fragment,
                parts.username, parts.password, parts.hostname,
                parts.port, query, load_method, **kwargs)
        except extension.not_found_exception:
            # Not found under this uri; fall through to the next candidate.
            pass
    raise IOError("unable to load: {0}".format(target_uris))
def update_task(self, task_id, revision, title=None, assignee_id=None, completed=None, recurrence_type=None, recurrence_count=None, due_date=None, starred=None, remove=None):
    '''Updates the task with the given ID to have the given information

    NOTE: The 'remove' parameter is an optional list of parameters to remove from the given task, e.g. ['due_date']
    '''
    # Pure delegation to the tasks endpoint module.
    return tasks_endpoint.update_task(
        self, task_id, revision,
        title=title,
        assignee_id=assignee_id,
        completed=completed,
        recurrence_type=recurrence_type,
        recurrence_count=recurrence_count,
        due_date=due_date,
        starred=starred,
        remove=remove)
def _incident_transform ( incident ) :
"""Get output dict from incident .""" | return { 'id' : incident . get ( 'cdid' ) , 'type' : incident . get ( 'type' ) , 'timestamp' : incident . get ( 'date' ) , 'lat' : incident . get ( 'lat' ) , 'lon' : incident . get ( 'lon' ) , 'location' : incident . get ( 'address' ) , 'link' : incident . get ( 'link' ) } |
def add(self, *args, **kwargs):
    """Add to list of cats"""
    # Delegate to the selection helper on the catalogue.
    selection = self.cat.select
    return selection.add(*args, **kwargs)
def matrix_to_timeseries(image, matrix, mask=None):
    """converts a matrix to a ND image.

    ANTsR function: `matrix2timeseries`

    Arguments
    ---------
    image : reference ND image
    matrix : matrix to convert to image
    mask : mask image defining voxels of interest; if None, every voxel of
        one frame of `image` is used.

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> img = ants.make_image((10,10,10,5))
    >>> mask = ants.ndimage_to_list(img)[0] * 0
    >>> mask[4:8, 4:8, 4:8] = 1
    >>> mat = ants.timeseries_to_matrix(img, mask=mask)
    >>> img2 = ants.matrix_to_timeseries(img, mat, mask)
    """
    if mask is None:
        # BUG FIX: the previous code read ``temp[0]`` here before ``temp``
        # was assigned, raising NameError whenever mask was omitted.  Build
        # the default all-ones mask from the first frame of the reference
        # image instead.
        mask = utils.ndimage_to_list(image)[0] * 0 + 1
    frames = matrix_to_images(matrix, mask)
    new_image = utils.list_to_ndimage(image, frames)
    iio.copy_image_info(image, new_image)
    return new_image
def add_chain(self, chain):
    """Add block in a chain in the correct order. Also add all of the blocks
    to the cache before doing a purge."""
    with self._lock:
        # Sort in place (ascending block number) so parents are cached
        # before their children.
        chain.sort(key=lambda blk: blk.block_num)
        for block in chain:
            signature = block.header_signature
            if signature not in self._cache:
                self._cache[signature] = self.CachedValue(block)
            predecessor = block.previous_block_id
            if predecessor in self._cache:
                # A cached parent gains a reference for each cached child.
                self._cache[predecessor].inc_count()
        if time.time() > self._next_purge_time:
            self._purge_expired()
            self._next_purge_time = time.time() + self._purge_frequency
def dfs(node, expand=expansion_all, callback=None, silent=True):
    """Perform a depth-first search on the node graph

    :param node: GraphNode
    :param expand: Returns the list of Nodes to explore from a Node
    :param callback: Callback to run in each node
    :param silent: Don't throw exception on circular dependency
    :return:
    """
    # Seed the work stack with the start node's children (the start node
    # itself is neither counted nor passed to the callback).
    pending = deque(expand(node))
    while pending:
        current = pending.pop()
        current.visits += 1
        if callback:
            callback(current)
        for child in expand(current):
            if child.visits < 1:
                pending.append(child)
            elif not silent:
                # Revisiting a node means the graph has a cycle.
                raise CircularDependency('Circular Dependency')
def view_pmap(token, dstore):
    """Display the mean ProbabilityMap associated to a given source group name.

    :param token: string of the form ``pmap:<grp>`` (called as pmap:grp)
    :param dstore: datastore providing ``csm_info`` and hazard curves
    :returns: string representation of the mean ProbabilityMap
    """
    grp = token.split(':')[1]
    rlzs_assoc = dstore['csm_info'].get_rlzs_assoc()
    pgetter = getters.PmapGetter(dstore, rlzs_assoc)
    # NOTE: removed a dead ``pmap = {}`` assignment that was immediately
    # overwritten by the line below.
    pmap = pgetter.get_mean(grp)
    return str(pmap)
def _REOM(y, t, pot, l2):
    """NAME:
       _REOM
    PURPOSE:
       implements the EOM, i.e., the right-hand side of the differential
       equation
    INPUT:
       y - current phase-space position
       t - current time
       pot - (list of) Potential instance(s)
       l2 - angular momentum squared
    OUTPUT:
       dy/dt
    HISTORY:
       2010-07-20 - Written - Bovy (NYU)
    """
    R, vR = y
    # dR/dt = vR ; dvR/dt = L^2/R^3 + F_R(R, t)
    centrifugal = l2 / R ** 3.
    return [vR, centrifugal + _evaluateplanarRforces(pot, R, t=t)]
def sample_bitstrings(self, n_samples):
    """Sample bitstrings from the distribution defined by the wavefunction.

    Qubit 0 is at ``out[:, 0]``.

    :param n_samples: The number of bitstrings to sample
    :return: An array of shape (n_samples, n_qubits)
    """
    if self.rs is None:
        raise ValueError("You have tried to perform a stochastic operation without setting the "
                         "random state of the simulator. Might I suggest using a PyQVM object?")
    # Born rule: outcome probabilities are the squared amplitudes.
    probabilities = np.abs(self.wf) ** 2
    possible_bitstrings = all_bitstrings(self.n_qubits)
    sampled_indices = self.rs.choice(2 ** self.n_qubits, n_samples, p=probabilities)
    # Reverse column order so qubit 0 ends up in the leftmost column.
    return np.flip(possible_bitstrings[sampled_indices, :], axis=1)
def delaunay_graph(X, weighted=False):
    '''Delaunay triangulation graph.'''
    # Edge endpoints from the triangulation, stacked into (num_edges, 2).
    src, dst = _delaunay_edges(X)
    edge_pairs = np.column_stack((src, dst))
    edge_weights = paired_distances(X[src], X[dst]) if weighted else None
    return Graph.from_edge_pairs(edge_pairs, num_vertices=X.shape[0],
                                 symmetric=True, weights=edge_weights)
def append_sint32(self, value):
    """Appends a 32-bit integer to our buffer, zigzag-encoded and then
    varint-encoded."""
    # ZigZag maps signed ints onto unsigned so small magnitudes stay short.
    self._stream.append_var_uint32(wire_format.zig_zag_encode(value))
def tune(runner, kernel_options, device_options, tuning_options):
    """Find the best performing kernel configuration in the parameter space

    :params runner: A runner from kernel_tuner.runners
    :type runner: kernel_tuner.runner
    :param kernel_options: A dictionary with all options for the kernel.
    :type kernel_options: kernel_tuner.interface.Options
    :param device_options: A dictionary with all options for the device
        on which the kernel should be tuned.
    :type device_options: kernel_tuner.interface.Options
    :param tuning_options: A dictionary with all options regarding the tuning
        process.
    :type tuning_options: kernel_tuner.interface.Options
    :returns: A list of dictionaries for executed kernel configurations and their
        execution times. And a dictionary that contains information
        about the hardware/software environment on which the tuning took place.
    :rtype: list(dict()), dict()
    """
    # Genetic-algorithm search: fixed population size and generation count.
    # NOTE(review): each "dna" is presumably one point in the tuning space —
    # confirm against random_population's contract.
    dna_size = len(tuning_options.tune_params.keys())
    pop_size = 20
    generations = 100
    tuning_options["scaling"] = False
    tune_params = tuning_options.tune_params
    population = random_population(dna_size, pop_size, tune_params)
    best_time = 1e20
    all_results = []
    # Cache of already-benchmarked configurations shared across generations.
    cache = {}
    for generation in range(generations):
        if tuning_options.verbose:
            print("Generation %d, best_time %f" % (generation, best_time))
        # determine fitness of population members
        weighted_population = []
        for dna in population:
            time = _cost_func(dna, kernel_options, tuning_options, runner, all_results, cache)
            weighted_population.append((dna, time))
        population = []
        # 'best_time' is used only for printing
        if tuning_options.verbose and all_results:
            best_time = min(all_results, key=lambda x: x["time"])["time"]
        # population is sorted such that better configs have higher chance of reproducing
        weighted_population.sort(key=lambda x: x[1])
        # crossover and mutate
        for _ in range(pop_size // 2):
            ind1 = weighted_choice(weighted_population)
            ind2 = weighted_choice(weighted_population)
            ind1, ind2 = crossover(ind1, ind2)
            population.append(mutate(ind1, dna_size, tune_params))
            population.append(mutate(ind2, dna_size, tune_params))
    return all_results, runner.dev.get_environment()
async def is_pairwise_exists(wallet_handle: int, their_did: str) -> bool:
    """Check if pairwise is exists.

    :param wallet_handle: wallet handler (created by open_wallet).
    :param their_did: encoded Did.
    :return: true - if pairwise is exists, false - otherwise
    """
    logger = logging.getLogger(__name__)
    logger.debug("is_pairwise_exists: >>> wallet_handle: %r, their_did: %r",
                 wallet_handle, their_did)
    # Lazily create the FFI callback once and reuse it on later calls.
    if not hasattr(is_pairwise_exists, "cb"):
        logger.debug("is_pairwise_exists: Creating callback")
        is_pairwise_exists.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_bool))
    res = await do_call('indy_is_pairwise_exists',
                        c_int32(wallet_handle),
                        c_char_p(their_did.encode('utf-8')),
                        is_pairwise_exists.cb)
    logger.debug("is_pairwise_exists: <<< res: %r", res)
    return res
def surround_parse(self, node, pre_char, post_char):
    """Parse the subnodes of a given node. Subnodes with tags in the
    `ignore` list are ignored. Prepend `pre_char` and append `post_char` to
    the output in self.pieces."""
    # Bracket the recursive subnode parse between the two delimiters.
    self.add_text(pre_char)
    self.subnode_parse(node)
    self.add_text(post_char)
def make_datalab_help_action(self):
    """Custom action for --datalab-help.

    The action outputs the package specific parameters and will be part of
    the "%%ml train" help string.
    """
    # Capture the help strings now; the returned class closes over them.
    help_sections = self.datalab_help
    help_epilog = self.datalab_epilog

    class _CustomAction(argparse.Action):
        def __init__(self, option_strings, dest, help=None):
            super(_CustomAction, self).__init__(
                option_strings=option_strings, dest=dest, nargs=0, help=help)

        def __call__(self, parser, args, values, option_string=None):
            print('\n\n'.join(help_sections))
            if help_epilog:
                print(help_epilog)
            # We have printed all help string datalab needs. If we don't
            # quit, it will complain about missing required arguments later.
            quit()

    return _CustomAction
def sign(user_id, user_type=None, today=None, session=None):
    """Check user id for validity, then sign user in if they are signed
    out, or out if they are signed in.

    :param user_id: The ID of the user to sign in or out.
    :param user_type: (optional) Specify whether user is signing in as a `'student'` or `'tutor'`.
    :param today: (optional) The current date as a `datetime.date` object. Used for testing.
    :param session: (optional) SQLAlchemy session through which to access the database.
    :return: `Status` named tuple object. Information about the sign attempt.
    :raises UnregisteredUser: if `user_id` is not registered.
    """  # noqa
    # Default the session and date here (not in the signature) so each call
    # gets a fresh session and the current day.  Removed the redundant
    # ``else: session = session`` / ``else: today = today`` branches.
    if session is None:
        session = Session()
    if today is None:
        today = date.today()
    user = (session.query(User)
            .filter(User.user_id == user_id)
            .one_or_none())
    if user is None:
        # Guard clause: unknown users cannot sign in or out.
        raise UnregisteredUser(
            '{} not registered. Please register at the front desk.'.format(user_id))
    # Entries for today with no sign-out time mean the user is signed in.
    signed_in_entries = (user.entries
                         .filter(Entry.date == today)
                         .filter(Entry.time_out.is_(None))
                         .all())
    if not signed_in_entries:
        new_entry = sign_in(user, user_type=user_type)
        session.add(new_entry)
        status = Status(valid=True, in_or_out='in',
                        user_name=get_user_name(user),
                        user_type=new_entry.user_type, entry=new_entry)
    else:
        for entry in signed_in_entries:
            signed_out_entry = sign_out(entry)
            session.add(signed_out_entry)
            status = Status(valid=True, in_or_out='out',
                            user_name=get_user_name(user),
                            user_type=signed_out_entry.user_type,
                            entry=signed_out_entry)
    session.commit()
    logger.debug(status)
    return status
def iter_shortcuts():
    """Iterate over keyboard shortcuts."""
    # Shortcuts are stored as "<context>/<name>" -> key string.
    for full_name, key_sequence in CONF.items('shortcuts'):
        context, shortcut_name = full_name.split("/", 1)
        yield context, shortcut_name, key_sequence
def aget(dct, key):
    r"""Allow to get values deep in a dict with iterable keys

    Accessing leaf values is quite straightforward:

        >>> dct = {'a': {'x': 1, 'b': {'c': 2}}}
        >>> aget(dct, ('a', 'x'))
        1
        >>> aget(dct, ('a', 'b', 'c'))
        2

    If key is empty, it returns unchanged the ``dct`` value.

        >>> aget({'x': 1}, ())
        {'x': 1}
    """
    # Walk the key path iteratively instead of recursing.
    value = dct
    for head in key:
        if isinstance(value, list):
            # Lists are indexed by integer-convertible keys.
            try:
                idx = int(head)
            except ValueError:
                raise IndexNotIntegerError(
                    "non-integer index %r provided on a list." % head)
            try:
                value = value[idx]
            except IndexError:
                raise IndexOutOfRange(
                    "index %d is out of range (%d elements in list)."
                    % (idx, len(value)))
        else:
            try:
                value = value[head]
            except KeyError:
                ## Replace with a more informative KeyError
                raise MissingKeyError("missing key %r in dict." % (head, ))
            except Exception:
                raise NonDictLikeTypeError(
                    "can't query subvalue %r of a leaf%s."
                    % (head,
                       (" (leaf value is %r)" % value)
                       if len(repr(value)) < 15 else ""))
    return value
def process(self):
    """This method handles the actual processing of Modules and Transforms"""
    # Modules run in priority order; each yields Transforms that patch
    # self.data (a list of lines) in place.
    self.modules.sort(key=lambda x: x.priority)
    for module in self.modules:
        transforms = module.transform(self.data)
        # Apply from the bottom of the file upward so that earlier line
        # numbers remain valid while we splice.
        transforms.sort(key=lambda x: x.linenum, reverse=True)
        for transform in transforms:
            linenum = transform.linenum
            # Normalize single-string payloads to a one-element list.
            # FIX: ``basestring`` is Python 2 only and raises NameError on
            # Python 3; use six.string_types for consistency with the rest
            # of this module's six usage.
            if isinstance(transform.data, six.string_types):
                transform.data = [transform.data]
            if transform.oper == "prepend":
                self.data[linenum:linenum] = transform.data
            elif transform.oper == "append":
                self.data[linenum + 1:linenum + 1] = transform.data
            elif transform.oper == "swap":
                self.data[linenum:linenum + 1] = transform.data
            elif transform.oper == "drop":
                self.data[linenum:linenum + 1] = []
            elif transform.oper == "noop":
                pass
def scopusParser(scopusFile):
    """Parses a scopus file, _scopusFile_, to extract the individual lines as [ScopusRecords](../classes/ScopusRecord.html#metaknowledge.scopus.ScopusRecord).

    A Scopus file is a csv (Comma-separated values) with a complete header, see [`scopus.scopusHeader`](#metaknowledge.scopus) for the entries, and each line after it containing a record's entry. The string valued entries are quoted with double quotes which means double quotes inside them can cause issues, see [scopusRecordParser()](#metaknowledge.scopus.recordScopus.scopusRecordParser) for more information.

    # Parameters

    _scopusFile_ : `str`

    > A path to a valid scopus file, use [isScopusFile()](#metaknowledge.scopus.scopusHandlers.isScopusFile) to verify

    # Returns

    `set[ScopusRecord]`

    > Records for each of the entries
    """
    # assumes the file is Scopus
    recSet = set()
    # First parse error encountered (if any); returned alongside the records
    # so callers can report partial failures.
    error = None
    lineNum = 0
    try:
        with open(scopusFile, 'r', encoding='utf-8') as openfile:
            # Get rid of the BOM
            openfile.read(1)
            # Header line, minus its trailing newline.
            header = openfile.readline()[:-1].split(',')
            # A header identical (as a set) to the canonical one is dropped so
            # ScopusRecord falls back to its default column mapping.
            if len(set(header) ^ set(scopusHeader)) == 0:
                header = None
            lineNum = 0
            try:
                # Record lines start at file line 2 (after BOM + header).
                for line, row in enumerate(openfile, start=2):
                    lineNum = line
                    recSet.add(ScopusRecord(row, header=header, sFile=scopusFile, sLine=line))
            except BadScopusFile as e:
                # Keep only the first error; later ones would be consequences.
                if error is None:
                    error = BadScopusFile("The file '{}' becomes unparsable after line: {}, due to the error: {} ".format(scopusFile, lineNum, e))
    except (csv.Error, UnicodeDecodeError):
        # Low-level decode/CSV failures: report where reading stopped.
        if error is None:
            error = BadScopusFile("The file '{}' has parts of it that are unparsable starting at line: {}.".format(scopusFile, lineNum))
    return recSet, error
def get_obsolete_messages(self, domain):
    """Returns obsolete valid messages after operation.

    @type domain: str
    @rtype: dict
    """
    if domain not in self.domains:
        raise ValueError('Invalid domain: {0}'.format(domain))
    # Lazily compute the per-domain buckets on first access.
    needs_processing = (domain not in self.messages
                        or 'obsolete' not in self.messages[domain])
    if needs_processing:
        self._process_domain(domain)
    return self.messages[domain]['obsolete']
def remove_cert(name, thumbprint, context=_DEFAULT_CONTEXT, store=_DEFAULT_STORE):
    '''
    Remove the certificate from the given certificate store.

    :param str thumbprint: The thumbprint value of the target certificate.
    :param str context: The name of the certificate store location context.
    :param str store: The name of the certificate store.

    Example of usage with only the required arguments:

    .. code-block:: yaml

        site0-cert-removed:
            win_pki.remove_cert:
                - thumbprint: 9988776655443322111000AAABBBCCCDDDEEEFFF

    Example of usage specifying all available arguments:

    .. code-block:: yaml

        site0-cert-removed:
            win_pki.remove_cert:
                - thumbprint: 9988776655443322111000AAABBBCCCDDDEEEFFF
                - context: LocalMachine
                - store: My
    '''
    ret = {'name': name, 'changes': dict(), 'comment': six.text_type(), 'result': None}
    store_path = r'Cert:\{0}\{1}'.format(context, store)
    current_certs = __salt__['win_pki.get_certs'](context=context, store=store)
    if thumbprint not in current_certs:
        # Nothing to do: the certificate is already absent.
        ret['result'] = True
        ret['comment'] = "Certificate '{0}' already removed from store: {1}".format(thumbprint, store_path)
    elif __opts__['test']:
        # Test mode: report the pending change without touching the store.
        ret['changes'] = {'old': thumbprint, 'new': None}
        ret['comment'] = "Certificate '{0}' will be removed from store: {1}".format(thumbprint, store_path)
    else:
        ret['changes'] = {'old': thumbprint, 'new': None}
        ret['result'] = __salt__['win_pki.remove_cert'](thumbprint=thumbprint, context=context, store=store)
        if ret['result']:
            ret['comment'] = "Certificate '{0}' removed from store: {1}".format(thumbprint, store_path)
        else:
            ret['comment'] = "Certificate '{0}' unable to be removed from store: {1}".format(thumbprint, store_path)
    return ret
def create_swagger_json_handler(app, **kwargs):
    """Create a handler that returns the swagger definition for an application.

    This method assumes the application is using the
    TransmuteUrlDispatcher as the router.
    """
    spec = getattr(app, SWAGGER_ATTR_NAME, SwaggerSpec())
    _add_blueprint_specs(app, spec)
    # Serialize once up front; the handler just replays the bytes.
    encoded_spec = json.dumps(spec.swagger_definition(**kwargs)).encode("UTF-8")

    def swagger():
        # we allow CORS, so this can be requested at swagger.io
        return Response(encoded_spec,
                        headers={"Access-Control-Allow-Origin": "*"},
                        content_type="application/json")

    return swagger
def a_return_and_reconnect(ctx):
    """Send new line and reconnect."""
    # A carriage return wakes the device, then the session is re-established.
    controller = ctx.ctrl
    controller.send("\r")
    ctx.device.connect(controller)
    return True
def touch(self, mode=0o666, exist_ok=True):
    """Create a fake file for the path with the given access mode,
    if it doesn't exist.

    Args:
        mode: the file mode for the file if it does not exist
        exist_ok: if the file already exists and this is True, nothing
            happens, otherwise FileExistError is raised

    Raises:
        OSError: (Python 2 only) if the file exists and exist_ok is False.
        FileExistsError: (Python 3 only) if the file exists and exist_ok is
            False.
    """
    if self._closed:
        self._raise_closed()
    if not self.exists():
        # Create an empty file, then apply the requested mode.
        fake_file = self.open('w')
        fake_file.close()
        self.chmod(mode)
    elif exist_ok:
        # Behave like POSIX touch: just bump the timestamps.
        self.filesystem.utime(self._path(), None)
    else:
        self.filesystem.raise_os_error(errno.EEXIST, self._path())
def _validate_configuration_type ( self , configuration_type ) :
"""Validate configuration type
: param configuration _ type : configuration _ type , should be Startup or Running
: raise Exception :""" | if configuration_type . lower ( ) != 'running' and configuration_type . lower ( ) != 'startup' :
raise Exception ( self . __class__ . __name__ , 'Configuration Type is invalid. Should be startup or running' ) |
def set_We(self, We, Eemin=None, Eemax=None, amplitude_name=None):
    """Normalize particle distribution so that the total energy in electrons
    between Eemin and Eemax is We.

    Parameters
    ----------
    We : :class:`~astropy.units.Quantity` float
        Desired energy in electrons.
    Eemin : :class:`~astropy.units.Quantity` float, optional
        Minimum electron energy for energy content calculation.
    Eemax : :class:`~astropy.units.Quantity` float, optional
        Maximum electron energy for energy content calculation.
    amplitude_name : str, optional
        Name of the amplitude parameter of the particle distribution. It
        must be accessible as an attribute of the distribution function.
        Defaults to ``amplitude``.
    """
    We = validate_scalar("We", We, physical_type="energy")
    # Rescale the amplitude by the ratio of desired to current energy.
    scale = (We / self.compute_We(Eemin=Eemin, Eemax=Eemax)).decompose()
    if amplitude_name is None:
        try:
            self.particle_distribution.amplitude *= scale
        except AttributeError:
            log.error("The particle distribution does not have an attribute"
                      " called amplitude to modify its normalization: you can"
                      " set the name with the amplitude_name parameter of set_We")
    else:
        current_amplitude = getattr(self.particle_distribution, amplitude_name)
        setattr(self.particle_distribution, amplitude_name,
                current_amplitude * scale)
def validate(self, value, param_name, exc=None, logger=None):
    """Run every configured check against *value*.

    :param value: value to validate
    :param param_name: name of the value (for logging purpose)
    :param exc: exception to raise (default is "ValidatorError")
    :param logger: logger to use (default will be "Validator.logger")
    """
    if exc is not None:
        self.exc = exc
    if logger is not None:
        self.logger = logger
    # Each check is skipped when its constraint is unset (None/False).
    if self.type is not None and type(value) != self.type:  # pylint: disable=unidiomatic-typecheck
        self.error(f'invalid type for parameter "{param_name}": {type(value)} (value: {value}) -- expected {self.type}')
    if self.instance is not None and not isinstance(value, self.instance):
        self.error(f'invalid instance for parameter "{param_name}": {type(value)} (value: {value})'
                   f' -- expected {self.instance}')
    if self.min is not None and value < self.min:
        self.error(f'invalid value for parameter "{param_name}" (under minima of {self.min}): {value}')
    if self.max is not None and value > self.max:
        self.error(f'invalid value for parameter "{param_name}" (over maxima if {self.max}): {value}')
    if self.regex is not None and not re_full_match(self.regex, value):
        self.error(f'invalid value for parameter "{param_name}" (should match: "{self.regex}"): {value}')
    if self.in_list is not None and value not in self.in_list:
        self.error(f'invalid value for parameter "{param_name}"; "{value}" is not in list: {self.in_list}')
    if self.path_exists and not exists(value):
        self.error(f'"{param_name}" file does not exist: {value}')
    return True
def start(self, timeout=None):
    """Install the server on its IOLoop, optionally starting the IOLoop.

    Parameters
    ----------
    timeout : float or None, optional
        Time in seconds to wait for server thread to start.

    Returns
    -------
    bool or None
        When ``timeout`` is given, the result of waiting on the running
        event (False on timeout); otherwise None.

    Raises
    ------
    RuntimeError
        If the server is already running.
    """
    if self._running.isSet():
        raise RuntimeError('Server already started')
    self._stopped.clear()
    # Make sure we have an ioloop
    self.ioloop = self._ioloop_manager.get_ioloop()
    self._ioloop_manager.start()
    # Set max_buffer_size to ensure streams are closed
    # if too-large messages are received
    self._tcp_server = tornado.tcpserver.TCPServer(self.ioloop, max_buffer_size=self.MAX_MSG_SIZE)
    # Route all incoming connections to this server's stream handler.
    self._tcp_server.handle_stream = self._handle_stream
    self._server_sock = self._bind_socket(self._bindaddr)
    # Re-read the bound address so an ephemeral port (0) is resolved
    # to the actual port assigned by the OS.
    self._bindaddr = self._server_sock.getsockname()
    # Installation runs on the IOLoop thread, not synchronously here.
    self.ioloop.add_callback(self._install)
    if timeout:
        return self._running.wait(timeout)
def run(ctx, commandline):
    """Run command with environment variables present."""
    # Load the env file configured on the click context.
    env_file = ctx.obj['FILE']
    env_vars = dotenv_values(env_file)
    if not commandline:
        click.echo('No command given.')
        exit(1)
    exit(run_command(commandline, env_vars))
def move_to_customer_quote(self, quote_id, product_data, store_view=None):
    """Move products from the current quote to a customer quote.

    :param quote_id: Shopping cart ID (quote ID)
    :param product_data: list of dicts of product details, e.g.
        ``{'product_id': 1, 'qty': 2, 'options': {...}, 'sku': 'S0012345'}``
    :param store_view: Store view ID or code
    :return: boolean, True if the product is moved to customer quote
    """
    call_args = [quote_id, product_data, store_view]
    response = self.call('cart_product.moveToCustomerQuote', call_args)
    return bool(response)
def _read_info_as_dict ( fid , values ) :
"""Convenience function to read info in axon data to a nicely organized
dict .""" | output = { }
for key , fmt in values :
val = unpack ( fmt , fid . read ( calcsize ( fmt ) ) )
if len ( val ) == 1 :
output [ key ] = val [ 0 ]
else :
output [ key ] = val
return output |
def present(name, createdb=None, createroles=None, encrypted=None, superuser=None,
            inherit=None, login=None, replication=None, password=None,
            refresh_password=None, groups=None, user=None, maintenance_db=None,
            db_password=None, db_host=None, db_port=None, db_user=None):
    '''Ensure that the named group is present with the specified privileges.

    Please note that the user/group notion in postgresql is just abstract:
    we have roles, where users can be seen as roles with the ``LOGIN``
    privilege and groups the others.

    name
        The name of the group to manage
    createdb
        Is the group allowed to create databases?
    createroles
        Is the group allowed to create other roles/users
    encrypted
        Should the password be encrypted in the system catalog?
    login
        Should the group have login perm
    inherit
        Should the group inherit permissions
    superuser
        Should the new group be a "superuser"
    replication
        Should the new group be allowed to initiate streaming replication
    password
        The group's password; either a plain string or an md5 postgresql
        hashed password (``'md5{MD5OF({password}{role}}'``).  If encrypted
        is ``None`` or ``True`` the password is automatically encrypted to
        that format if not already done.
    refresh_password
        If ``True`` the password is updated without a comparison check
        first, which works in environments without superuser access
        (e.g. Amazon RDS for PostgreSQL).
    groups
        A string of comma separated groups the group should be in
    user
        System user all operations should be performed on behalf of

        .. versionadded:: 0.17.0
    db_user
        database username if different from config or default
    db_password
        user password if any password for a specified user
    db_host
        Database host if different from config or default
    db_port
        Database port if different from config or default
    '''
    ret = {'name': name, 'changes': {}, 'result': True,
           'comment': 'Group {0} is already present'.format(name)}
    # default to encrypted passwords
    if encrypted is not False:
        encrypted = postgres._DEFAULT_PASSWORDS_ENCRYPTION
    # maybe encrypt if it's not already and necessary
    password = postgres._maybe_encrypt_password(name, password, encrypted=encrypted)
    db_args = {'maintenance_db': maintenance_db, 'runas': user, 'host': db_host,
               'user': db_user, 'port': db_port, 'password': db_password}
    # check if group exists
    group_attr = __salt__['postgres.role_get'](
        name, return_password=not refresh_password, **db_args)
    mode = 'create' if group_attr is None else 'update'
    update = {}
    if mode == 'update':
        # Diff the desired options against the role's current attributes.
        for opt, desired, attr in (
                ('createdb', createdb, 'can create databases'),
                ('inherit', inherit, 'inherits privileges'),
                ('login', login, 'can login'),
                ('createroles', createroles, 'can create roles'),
                ('replication', replication, 'replication'),
                ('superuser', superuser, 'superuser')):
            if desired is not None and group_attr[attr] != desired:
                update[opt] = desired
        if password is not None and (refresh_password or group_attr['password'] != password):
            update['password'] = True
    cret = None
    # `update` can only be non-empty in update mode, so this is equivalent
    # to: create, or update with at least one change.
    if mode == 'create' or update:
        if __opts__['test']:
            if update:
                ret['changes'][name] = update
            ret['result'] = None
            ret['comment'] = 'Group {0} is set to be {1}d'.format(name, mode)
            return ret
        cret = __salt__['postgres.group_{0}'.format(mode)](
            groupname=name, createdb=createdb, createroles=createroles,
            encrypted=encrypted, login=login, inherit=inherit,
            superuser=superuser, replication=replication,
            rolepassword=password, groups=groups, **db_args)
    if cret:
        ret['comment'] = 'The group {0} has been {1}d'.format(name, mode)
        if update:
            ret['changes'][name] = update
    elif cret is not None:
        ret['comment'] = 'Failed to create group {0}'.format(name)
        ret['result'] = False
    else:
        ret['result'] = True
    return ret
def update_launch_config(self, scaling_group, server_name=None, image=None,
                         flavor=None, disk_config=None, metadata=None,
                         personality=None, networks=None, load_balancers=None,
                         key_name=None, config_drive=False, user_data=None):
    """Updates the server launch configuration for an existing scaling group.

    One or more of the available attributes can be specified.

    NOTE: if you specify metadata, it will *replace* any existing metadata.
    If you want to add to it, you either need to pass the complete dict of
    metadata, or call the update_launch_metadata() method.
    """
    launch_kwargs = dict(
        server_name=server_name, image=image, flavor=flavor,
        disk_config=disk_config, metadata=metadata, personality=personality,
        networks=networks, load_balancers=load_balancers, key_name=key_name,
        config_drive=config_drive, user_data=user_data)
    return self._manager.update_launch_config(scaling_group, **launch_kwargs)
def atlas_node_stop(atlas_state):
    """Stop the atlas node threads: ask each component to stop, then wait."""
    for component in atlas_state:
        log.debug("Stopping Atlas component '%s'" % component)
        worker = atlas_state[component]
        worker.ask_join()
        worker.join()
    return True
def _get_repo_file_url(namespace, filename):
    """Return the URL for a hosted file in the Gluon repository.

    Parameters
    ----------
    namespace : str
        Namespace of the file.
    filename : str
        Name of the file.
    """
    # Bug fix: the format template was missing the {filename} placeholder,
    # so the filename argument was silently ignored and every URL ended
    # with a literal placeholder-less path.
    return '{base_url}{namespace}/{filename}'.format(
        base_url=_get_repo_url(), namespace=namespace, filename=filename)
def uniq(args):
    """%prog uniq fasta uniq.fasta

    remove fasta records that are the same
    """
    # NOTE: the docstring above is used verbatim as the CLI usage text.
    p = OptionParser(uniq.__doc__)
    # --seq: dedupe on sequence content instead of on record ID.
    p.add_option("--seq", default=False, action="store_true",
                 help="Uniqify the sequences [default: %default]")
    # -t/--trimname: drop the description so the defline stops at the ID.
    p.add_option("-t", "--trimname", dest="trimname", action="store_true",
                 default=False,
                 help="turn on the defline trim to first space [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(p.print_help())
    fastafile, uniqfastafile = args
    fw = must_open(uniqfastafile, "w")
    seq = opts.seq
    # _uniq_rec presumably yields only the first record per key (ID or
    # sequence) -- TODO confirm against its definition.
    for rec in _uniq_rec(fastafile, seq=seq):
        if opts.trimname:
            rec.description = ""
        SeqIO.write([rec], fw, "fasta")
def check_type(param, datatype):
    """Make sure that param is of type datatype and return it.

    None is passed through untouched.  A datatype exposing a callable
    ``clean`` method handles its own conversion.  Legacy string names
    ('str', 'bool', 'float', 'date', 'datetime', 'timedelta', 'json',
    'int') are first mapped to the corresponding types.  A failed
    conversion falls back to an empty/zero value or None, depending on
    the target type.
    """
    if param is None:
        return param
    if getattr(datatype, 'clean', None) and callable(datatype.clean):
        try:
            return datatype.clean(param)
        except ValueError:
            raise BadArgumentError(param)
    elif isinstance(datatype, str):
        # Legacy path: datatype given as a string name.
        datatype = {
            'str': str,
            'bool': bool,
            'float': float,
            'date': datetime.date,
            'datetime': datetime.datetime,
            'timedelta': datetime.timedelta,
            'json': 'json',  # exception
            'int': int,
        }[datatype]

    def _coerce(convert, fallback):
        # Shared conversion-with-fallback pattern for the branches below.
        try:
            return convert(param)
        except ValueError:
            return fallback

    if datatype is str and not isinstance(param, basestring):
        param = _coerce(str, str())
    elif datatype is int and not isinstance(param, int):
        param = _coerce(int, int())
    elif datatype is bool and not isinstance(param, bool):
        param = str(param).lower() in ("true", "t", "1", "y", "yes")
    elif datatype is datetime.datetime and not isinstance(param, datetime.datetime):
        param = _coerce(dtutil.string_to_datetime, None)
    elif datatype is datetime.date and not isinstance(param, datetime.date):
        param = _coerce(lambda value: dtutil.string_to_datetime(value).date(), None)
    elif datatype is datetime.timedelta and not isinstance(param, datetime.timedelta):
        param = _coerce(dtutil.strHoursToTimeDelta, None)
    elif datatype == "json" and isinstance(param, basestring):
        param = _coerce(json.loads, None)
    return param
def reset_indent(token_class):
    """Build a lexer callback that clears all indentation tracking state."""
    def callback(lexer, match, context):
        # Wipe every piece of indent bookkeeping back to its initial state.
        context.block_scalar_indent = None
        context.indent_stack = []
        context.next_indent = 0
        context.indent = -1
        yield match.start(), token_class, match.group()
        context.pos = match.end()
    return callback
def get_service_url(request, redirect_to=None):
    """Generates application django service URL for CAS"""
    if hasattr(django_settings, 'CAS_ROOT_PROXIED_AS'):
        # Behind a proxy: the externally visible root is configured.
        service = django_settings.CAS_ROOT_PROXIED_AS + request.path
    else:
        service = urllib_parse.urlunparse(
            (get_protocol(request), request.get_host(), request.path, '', '', ''),
        )
    if not django_settings.CAS_STORE_NEXT:
        # Carry the post-login redirect target in the service URL itself.
        separator = '&' if '?' in service else '?'
        next_query = urllib_parse.urlencode(
            {REDIRECT_FIELD_NAME: redirect_to or get_redirect_url(request)})
        service = service + separator + next_query
    return service
def _fisher_jenks_means(values, classes=5, sort=True):
    """Jenks Optimal (Natural Breaks) algorithm implemented in Python.

    Notes
    -----
    The original Python code comes from here:
    http://danieljlewis.org/2010/06/07/jenks-natural-breaks-algorithm-in-python/
    and is based on a JAVA and Fortran code available here:
    https://stat.ethz.ch/pipermail/r-sig-geo/2006-March/000811.html

    Returns class breaks such that classes are internally homogeneous while
    assuring heterogeneity among classes.

    NOTE(review): `values` appears to be expected as a 1-D numpy array
    (its `.dtype` is read below) -- confirm with callers.
    """
    if sort:
        values.sort()  # in-place: mutates the caller's array
    n_data = len(values)
    # mat1[l, j]: optimal start index (1-based) of class j over the first l values.
    # mat2[l, j]: minimal total within-class variance for that partition.
    mat1 = np.zeros((n_data + 1, classes + 1), dtype=np.int32)
    mat2 = np.zeros((n_data + 1, classes + 1), dtype=np.float32)
    mat1[1, 1:] = 1
    mat2[2:, 1:] = np.inf
    v = np.float32(0)
    for l in range(2, len(values) + 1):
        # Running sums for a candidate last class ending at element l.
        s1 = np.float32(0)
        s2 = np.float32(0)
        w = np.float32(0)
        for m in range(1, l + 1):
            i3 = l - m + 1
            val = np.float32(values[i3 - 1])
            s2 += val * val
            s1 += val
            w += np.float32(1)
            # v = sum of squared deviations of values[i3-1 .. l-1].
            v = s2 - (s1 * s1) / w
            i4 = i3 - 1
            if i4 != 0:
                for j in range(2, classes + 1):
                    # Keep this split point if it does not worsen the total.
                    if mat2[l, j] >= (v + mat2[i4, j - 1]):
                        mat1[l, j] = i3
                        mat2[l, j] = v + mat2[i4, j - 1]
        mat1[l, 1] = 1
        mat2[l, 1] = v
    # Backtrack through mat1 to recover the break values.
    k = len(values)
    kclass = np.zeros(classes + 1, dtype=values.dtype)
    kclass[classes] = values[len(values) - 1]
    kclass[0] = values[0]
    for countNum in range(classes, 1, -1):
        pivot = mat1[k, countNum]
        id = int(pivot - 2)
        kclass[countNum - 1] = values[id]
        k = int(pivot - 1)
    return kclass
def set_log_level(self, level, keep=True):
    """Set the log level.

    When *keep* is True the level is pinned explicitly and will not change
    along with global log level changes.
    """
    self._set_log_level(level)
    self._log_level_set_explicitly = keep
def rename_pickled_ontology(filename, newname):
    """Try to rename a cached (pickled) ontology.

    Returns True on success, None if caching is disabled or the cache
    file does not exist.
    """
    old_path = "{0}/{1}.pickle".format(ONTOSPY_LOCAL_CACHE, filename)
    new_path = "{0}/{1}.pickle".format(ONTOSPY_LOCAL_CACHE, newname)
    if GLOBAL_DISABLE_CACHE or not os.path.isfile(old_path):
        return None
    os.rename(old_path, new_path)
    return True
def _calc_real_and_point(self):
    """Determine the real-space and point (self) energy terms of the
    Ewald summation, plus real-space forces when requested.

    The self energy is -(eta/pi)**(1/2) * sum_{i=1}^{N} q_i**2.

    Returns
    -------
    tuple
        (ereal, epoint, forces): the (nsites, nsites) real-space
        interaction matrix, the per-site point-energy array, and an
        (nsites, 3) force array (zeros unless self._compute_forces).
    """
    fcoords = self._s.frac_coords
    forcepf = 2.0 * self._sqrt_eta / sqrt(pi)
    coords = self._coords
    numsites = self._s.num_sites
    # Bug fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # it was always an alias of builtin float, so this is behavior-identical.
    ereal = np.empty((numsites, numsites), dtype=float)
    forces = np.zeros((numsites, 3), dtype=float)
    qs = np.array(self._oxi_states)
    # Point (self-interaction) correction term per site.
    epoint = -qs ** 2 * sqrt(self._eta / pi)
    for i in range(numsites):
        # All lattice images within the real-space cutoff of site i.
        nfcoords, rij, js, _ = self._s.lattice.get_points_in_sphere(
            fcoords, coords[i], self._rmax, zip_results=False)
        # remove the rii term (site interacting with its own image at r=0)
        inds = rij > 1e-8
        js = js[inds]
        rij = rij[inds]
        nfcoords = nfcoords[inds]
        qi = qs[i]
        qj = qs[js]
        erfcval = erfc(self._sqrt_eta * rij)
        new_ereals = erfcval * qi * qj / rij
        # Accumulate contributions per interacting site index.
        for k in range(numsites):
            ereal[k, i] = np.sum(new_ereals[js == k])
        if self._compute_forces:
            nccoords = self._s.lattice.get_cartesian_coords(nfcoords)
            fijpf = qj / rij ** 3 * (
                erfcval + forcepf * rij * np.exp(-self._eta * rij ** 2))
            forces[i] += np.sum(
                np.expand_dims(fijpf, 1)
                * (np.array([coords[i]]) - nccoords)
                * qi * EwaldSummation.CONV_FACT,
                axis=0)
    # Factor 0.5 avoids double counting of pair interactions.
    ereal *= 0.5 * EwaldSummation.CONV_FACT
    epoint *= EwaldSummation.CONV_FACT
    return ereal, epoint, forces
def _addconfig ( config , * paths ) :
"""Add path to CONF _ DIRS if exists .""" | for path in paths :
if path is not None and exists ( path ) :
config . append ( path ) |
def get_tasks():
    '''Get a list of known tasks with their routing queue'''
    tasks = {}
    for name, cls in celery.tasks.items():
        # Exclude celery internal tasks and udata test tasks.
        if name.startswith('celery.') or name.startswith('test-'):
            continue
        tasks[name] = get_task_queue(name, cls)
    return tasks
def variance(data, data_mean=None):
    """Return the population variance of a sequence of numbers.

    :param data: non-empty sequence of numbers.
    :param data_mean: precomputed mean of the sequence, or None to
        compute it here.
    :raises ZeroDivisionError: if *data* is empty.
    """
    # Bug fix: the previous `data_mean or mean(data)` recomputed the mean
    # whenever a falsy value was passed, so a legitimate precomputed mean
    # of 0 (or 0.0) was silently discarded.
    if data_mean is None:
        data_mean = mean(data)
    return sum((x - data_mean) ** 2 for x in data) / len(data)
def _add_zoho_token ( self , uri , http_method = "GET" , body = None , headers = None , token_placement = None ) :
"""Add a zoho token to the request uri , body or authorization header . follows bearer pattern""" | headers = self . prepare_zoho_headers ( self . access_token , headers )
return uri , headers , body |
def _pool_event_lifecycle_cb(conn, pool, event, detail, opaque):
    '''Storage pool lifecycle events handler'''
    payload = {
        'pool': {'name': pool.name(), 'uuid': pool.UUIDString()},
        'event': _get_libvirt_enum_string('VIR_STORAGE_POOL_EVENT_', event),
        'detail': 'unknown',  # currently unused
    }
    _salt_send_event(opaque, conn, payload)
def leverages(self, block='X'):
    """Calculate the leverages (hat matrix) for each observation.

    :param block: 'X' to use the X-scores (T), 'Y' to use the Y-scores (U).
    :return: the hat matrix H = S (S^T S)^{-1} S^T for the selected scores S.
    :raises ValueError: if *block* is neither 'X' nor 'Y'.
    """
    # TODO check with matlab and simca
    if block == 'X':
        scores = self.scores_t
    elif block == 'Y':
        scores = self.scores_u
    else:
        raise ValueError('block option must be either X or Y')
    # Bug fix: a misplaced closing parenthesis previously passed two
    # arguments to np.linalg.inv (a TypeError at runtime).  The intended
    # expression is S . inv(S^T S) . S^T.
    gram_inv = np.linalg.inv(np.dot(scores.T, scores))
    return np.dot(scores, np.dot(gram_inv, scores.T))
def loo(data, pointwise=False, reff=None, scale="deviance"):
    """Pareto-smoothed importance sampling leave-one-out cross-validation.

    Calculates leave-one-out (LOO) cross-validation for out of sample
    predictive model fit, following Vehtari et al. (2017).  Cross-validation
    is computed using Pareto-smoothed importance sampling (PSIS).

    Parameters
    ----------
    data : result of MCMC run
    pointwise : bool, optional
        if True the pointwise predictive accuracy will be returned.
        Defaults to False
    reff : float, optional
        Relative MCMC efficiency, `effective_n / n` i.e. number of effective
        samples divided by the number of actual samples.  Computed from
        trace by default.
    scale : str
        Output scale for loo. Available options are:
        - `deviance`: (default) -2 * (log-score)
        - `log`: 1 * log-score (after Vehtari et al. (2017))
        - `negative_log`: -1 * (log-score)

    Returns
    -------
    pandas.Series with the following columns:
    loo: approximated Leave-one-out cross-validation
    loo_se: standard error of loo
    p_loo: effective number of parameters
    shape_warn: 1 if the estimated shape parameter of Pareto distribution
        is greater than 0.7 for one or more samples
    loo_i: array of pointwise predictive accuracy, only if pointwise True
    pareto_k: array of Pareto shape values, only if pointwise True
    loo_scale: scale of the loo results
    """
    inference_data = convert_to_inference_data(data)
    # Both groups are needed: posterior for reff, sample_stats for logp.
    for group in ("posterior", "sample_stats"):
        if not hasattr(inference_data, group):
            # Bug fix: the two adjacent string literals previously
            # concatenated without a space ("...a posteriorgroup from data!").
            raise TypeError("Must be able to extract a {group} group from data!".format(group=group))
    if "log_likelihood" not in inference_data.sample_stats:
        raise TypeError("Data must include log_likelihood in sample_stats")
    posterior = inference_data.posterior
    log_likelihood = inference_data.sample_stats.log_likelihood
    n_samples = log_likelihood.chain.size * log_likelihood.draw.size
    # Flatten (chain, draw, ...) into (samples, ...).
    new_shape = (n_samples,) + log_likelihood.shape[2:]
    log_likelihood = log_likelihood.values.reshape(*new_shape)
    if scale.lower() == "deviance":
        scale_value = -2
    elif scale.lower() == "log":
        scale_value = 1
    elif scale.lower() == "negative_log":
        scale_value = -1
    else:
        raise TypeError('Valid scale values are "deviance", "log", "negative_log"')
    if reff is None:
        n_chains = len(posterior.chain)
        if n_chains == 1:
            reff = 1.0
        else:
            ess = effective_sample_size(posterior)
            # this mean is over all data variables
            reff = np.hstack([ess[v].values.flatten() for v in ess.data_vars]).mean() / n_samples
    # PSIS smoothing of the importance weights for the LOO posterior.
    log_weights, pareto_shape = psislw(-log_likelihood, reff)
    log_weights += log_likelihood
    warn_mg = 0
    if np.any(pareto_shape > 0.7):
        warnings.warn(
            """Estimated shape parameter of Pareto distribution is greater than 0.7 for
one or more samples. You should consider using a more robust model, this is because
importance sampling is less likely to work well if the marginal posterior and LOO posterior
are very different. This is more likely to happen with a non-robust model and highly
influential observations."""
        )
        warn_mg = 1
    loo_lppd_i = scale_value * _logsumexp(log_weights, axis=0)
    loo_lppd = loo_lppd_i.sum()
    loo_lppd_se = (len(loo_lppd_i) * np.var(loo_lppd_i)) ** 0.5
    # lppd of the full posterior, used for the effective parameter count.
    lppd = np.sum(_logsumexp(log_likelihood, axis=0, b_inv=log_likelihood.shape[0]))
    p_loo = lppd - loo_lppd / scale_value
    if pointwise:
        if np.equal(loo_lppd, loo_lppd_i).all():  # pylint: disable=no-member
            warnings.warn(
                """The point-wise LOO is the same with the sum LOO, please double check
the Observed RV in your model to make sure it returns element-wise logp.
"""
            )
        return pd.Series(
            data=[loo_lppd, loo_lppd_se, p_loo, warn_mg, loo_lppd_i, pareto_shape, scale],
            index=["loo", "loo_se", "p_loo", "warning", "loo_i", "pareto_k", "loo_scale"],
        )
    else:
        return pd.Series(
            data=[loo_lppd, loo_lppd_se, p_loo, warn_mg, scale],
            index=["loo", "loo_se", "p_loo", "warning", "loo_scale"],
        )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.