Dataset column metadata: `signature` — string lengths 29 to 44.1k; `implementation` — string lengths 0 to 85.2k.
def get_slice(self, key, column_parent, predicate, consistency_level):
    """Fetch the group of columns contained by ``column_parent`` (either a
    ColumnFamily name or a ColumnFamily/SuperColumn name pair) selected by
    the given SlicePredicate.

    If no matching values are found, an empty list is returned.

    Parameters:
     - key
     - column_parent
     - predicate
     - consistency_level

    Returns a Deferred that fires with the result.
    """
    self._seqid += 1
    seqid = self._seqid
    # Register the pending request before sending, so the response handler
    # can find the Deferred by sequence id.
    deferred = defer.Deferred()
    self._reqs[seqid] = deferred
    self.send_get_slice(key, column_parent, predicate, consistency_level)
    return deferred
def _GetMemberForOffset(self, offset):
    """Finds the member whose data includes the provided offset.

    Args:
      offset (int): offset in the uncompressed data to find the containing
          member for.

    Returns:
      gzipfile.GzipMember: gzip file member or None if not available.

    Raises:
      ValueError: if the provided offset is outside of the bounds of the
          uncompressed data.
    """
    if offset < 0 or offset >= self.uncompressed_data_size:
      # Fix: the previous message claimed the offset was "larger than file
      # size" even for negative offsets; report the actual problem.
      raise ValueError(
          'Offset {0:d} is outside of the uncompressed data (size: {1:d}).'.format(
              offset, self.uncompressed_data_size))

    # Members are keyed by their end offset in the uncompressed stream; the
    # first member whose end offset exceeds the requested offset contains it.
    # The redundant iter() wrapper around items() was removed.
    for end_offset, member in self._members_by_end_offset.items():
      if offset < end_offset:
        return member

    return None
def atlas_zonefile_push_enqueue(zonefile_hash, name, txid, zonefile_data,
                                zonefile_queue=None, con=None, path=None):
    """Enqueue the given zonefile into our "push" queue, from which it will be
    replicated to storage and sent out to other peers who don't have it.

    Return True if we enqueued it.
    Return False if not.
    """
    bits = atlasdb_get_zonefile_bits(zonefile_hash, path=path, con=con)
    if len(bits) == 0:
        # invalid hash.
        # Fix: the original bare ``return`` yielded None here, contradicting
        # the documented "Return False if not" contract.
        return False

    res = False
    with AtlasZonefileQueueLocked(zonefile_queue) as zfq:
        # Only enqueue while there is room; otherwise report failure.
        if len(zfq) < MAX_QUEUED_ZONEFILES:
            zfdata = {
                'zonefile_hash': zonefile_hash,
                'zonefile': zonefile_data,
                'name': name,
                'txid': txid
            }
            zfq.append(zfdata)
            res = True

    return res
def receive_message(self, msg):
    """Responds to messages from other participants.

    Feeds the message through the local Paxos instance, announcing each
    message it produces in turn, until the instance yields nothing or a
    Resolution is reached.
    """
    # An incoming Resolution needs no further processing.
    if isinstance(msg, Resolution):
        return
    paxos = self.paxos_instance
    while msg:
        if isinstance(msg, Resolution):
            self.print_if_verbose("{} resolved value {}".format(self.network_uid, msg.value))
            break
        else:
            self.print_if_verbose("{} <- {} <- {}".format(self.network_uid, msg.__class__.__name__, msg.from_uid))
            # The Paxos instance may produce a follow-up message to send.
            msg = paxos.receive(msg)
            # Todo: Make it optional not to announce resolution (without
            # which it's hard to see final value).
            do_announce_resolution = True
            # NOTE(review): with do_announce_resolution hard-coded True the
            # second operand is never consulted, so every produced message
            # (including Resolutions) is announced.
            if msg and (do_announce_resolution or not isinstance(msg, Resolution)):
                self.announce(msg)
    # Persist the (possibly updated) Paxos state back onto this participant.
    self.setattrs_from_paxos(paxos)
def _build_index(maf_strm, ref_spec):
    """Build an index for a MAF genome alignment file; return it as a StringIO."""
    out_strm = StringIO.StringIO()
    # Bind the reference species so the iterator factory matches the
    # signature IndexedFile expects.
    block_iterator = functools.partial(genome_alignment_iterator,
                                       reference_species=ref_spec)
    indexed = IndexedFile(maf_strm, block_iterator,
                          JustInTimeGenomeAlignmentBlock.build_hash)
    indexed.write_index(out_strm)
    # Rewind so callers can read the index from the beginning.
    out_strm.seek(0)
    return out_strm
def h_kinetic(T, P, MW, Hvap, f=1):
    r'''Calculate the heat transfer coefficient for condensation of a pure
    chemical inside a vertical tube or tube bundle, as presented in [2]_
    according to [1]_.

    .. math::
        h = \left(\frac{2f}{2-f}\right)\left(\frac{MW}{1000\cdot 2\pi R T}
        \right)^{0.5}\left(\frac{H_{vap}^2 P \cdot MW}{1000\cdot RT^2}\right)

    Parameters
    ----------
    T : float
        Vapor temperature, [K]
    P : float
        Vapor pressure, [Pa]
    MW : float
        Molecular weight of the gas, [g/mol]
    Hvap : float
        Heat of vaporization of the fluid at P, [J/kg]
    f : float
        Correction factor, [-]

    Returns
    -------
    h : float
        Heat transfer coefficient [W/m^2/K]

    Notes
    -----
    f corrects for how the removal of gas particles affects the behavior of
    the ideal gas in diffusing to the condensing surface.  It is close to
    one and not well explored in the literature.

    Examples
    --------
    Water at 1 bar and 300 K:

    >>> h_kinetic(300, 1E5, 18.02, 2441674)
    30788845.562480535

    References
    ----------
    .. [1] Berman, L. D. "On the Effect of Molecular-Kinetic Resistance upon
       Heat Transfer with Condensation." Int. J. Heat Mass Transfer 10, no.
       10 (1967): 1463. doi:10.1016/0017-9310(67)90033-6.
    .. [2] Kakac, Sadik, ed. Boilers, Evaporators, and Condensers. Wiley, 1991.
    .. [3] Stephan, Karl. Heat Transfer in Condensation and Boiling.
       Springer, 2013.
    '''
    # Evaluate the three factors of the correlation separately for clarity;
    # the product is identical to the original single expression.
    suction_factor = (2*f)/(2 - f)
    kinetic_factor = (MW/(1000*2*pi*R*T))**0.5
    driving_term = (Hvap**2*P*MW)/(1000*R*T**2)
    return suction_factor*kinetic_factor*driving_term
def temperature(self):
    """Read the value of the internal temperature sensor.

    :returns: Temperature in degrees Celsius as float.

    :Example:
    >>> sensor = MPU6050I2C(gw)
    >>> sensor.wakeup()
    >>> sensor.temperature()
    49.38
    """
    if not self.awake:
        raise Exception("MPU6050 is in sleep mode, use wakeup()")
    # Register 0x41 holds the raw 16-bit big-endian signed temperature.
    data = self.i2c_read_register(0x41, 2)
    (raw_temp,) = struct.unpack('>h', data)
    # Conversion per the MPU6050 datasheet formula.
    return round((raw_temp / 340) + 36.53, 2)
def _get_web_auth_token(self):
    """Retrieve a token from the network for web authentication.

    The token then has to be authorized from getAuthURL before creating
    a session.
    """
    request = _Request(self.network, "auth.getToken")
    # By default a request is only signed when a session key is provided,
    # so force signing explicitly here.
    request.sign_it()
    response_doc = request.execute()
    token_element = response_doc.getElementsByTagName("token")[0]
    return token_element.firstChild.data
def _parse_multi_byte(self, s):
    # type: (str) -> int
    """Parse ``s`` as a multibyte representation to get the int value of
    this AbstractUVarIntField.

    @param str s: the multibyte string to parse.
    @return int: the parsed int value represented by this
        AbstractUVarIntField.  # noqa: E501
    @raise: AssertionError
    @raise: Scapy_Exception if the input value encodes an integer larger
        than 1 << 64  # noqa: E501
    """
    assert (len(s) >= 2)
    tmp_len = len(s)
    value = 0
    i = 1
    byte = orb(s[i])
    # For CPU sake, stops at an arbitrary large number!
    max_value = 1 << 64
    # As long as the MSB (continuation bit) is set, another byte must be read
    while byte & 0x80:
        # Clear the continuation bit and accumulate the 7 payload bits.
        value += (byte ^ 0x80) << (7 * (i - 1))
        if value > max_value:
            # NOTE(review): str.format treats the '{64}' below as positional
            # index 64, so raising this would itself raise IndexError;
            # presumably '(>2^64)' was intended — TODO confirm upstream.
            raise error.Scapy_Exception(
                'out-of-bound value: the string encodes a value that is too large (>2^{64}): {}'.format(value)  # noqa: E501
            )
        i += 1
        assert i < tmp_len, 'EINVAL: x: out-of-bound read: the string ends before the AbstractUVarIntField!'  # noqa: E501
        byte = orb(s[i])
    # Final byte (MSB clear) contributes its bits directly.
    value += byte << (7 * (i - 1))
    # Add the maximum value encodable in the field's prefix bits, per the
    # varint encoding this field implements.
    value += self._max_value
    assert (value >= 0)
    return value
def grid_search(grid_scores, change, subset=None, kind='line', cmap=None,
                ax=None):
    """Plot results from a sklearn grid search by changing two parameters
    at most.

    Parameters
    ----------
    grid_scores : list of named tuples
        Results from a sklearn grid search (the ``grid_scores_`` attribute).
    change : str or iterable with len <= 2
        Parameter(s) to change.
    subset : dictionary-like
        parameter-value(s) pairs to subset from grid_scores
        (e.g. ``{'n_estimators': [1, 10]}``); if None all combinations
        are used.
    kind : ['line', 'bar']
        Plot type; only applies when change is a single parameter.
    cmap : matplotlib Colormap
        Only applies when change has two parameters; colormap for the
        matrix.  If None a modified OrRd colormap is used.
    ax : matplotlib Axes
        Axes to draw onto; defaults to the current Axes.

    Returns
    -------
    ax : matplotlib Axes
        Axes containing the plot.

    Examples
    --------
    .. plot:: ../../examples/grid_search.py
    """
    if change is None:
        raise ValueError(('change can\'t be None, you need to select at least'
                          ' one value to make the plot.'))

    # Fall back to the current axes / default colormap when not supplied.
    ax = ax if ax is not None else plt.gca()
    cmap = cmap if cmap is not None else default_heatmap()

    single_param = isinstance(change, string_types) or len(change) == 1
    if single_param:
        return _grid_search_single(grid_scores, change, subset, kind, ax)
    if len(change) == 2:
        return _grid_search_double(grid_scores, change, subset, cmap, ax)
    raise ValueError('change must have length 1 or 2 or be a string')
def median(self, default=None):
    """Calculate the median value over the time series.

    :param default: Value to return as a default should the calculation
        not be possible (i.e. the series is empty).
    :return: Float representing the median value or ``default``.
    """
    if not self.values:
        return default
    # numpy.asscalar() was deprecated in NumPy 1.16 and removed in 1.23;
    # ndarray.item() is the documented replacement.
    return numpy.median(self.values).item()
def get_or_create_vocab(self, data_dir, tmp_dir, force_get=False):
    """Get vocab for distill problems.

    The vocab file is assumed to already be present in ``data_dir``, the
    directory where generated data is stored; ``tmp_dir`` and ``force_get``
    are accepted for interface compatibility but unused.
    """
    vocab_path = os.path.join(data_dir, self.vocab_filename)
    return text_encoder.SubwordTextEncoder(vocab_path)
def get_user(self, username):
    """Get a specific user as a dict of attributes.

    Single-valued attributes are unwrapped from their list; multi-valued
    attributes are kept as lists.

    Raises UserDoesntExist when the lookup returns nothing.
    """
    entry = self._get_user(self._byte_p2(username), ALL_ATTRS)
    if entry is None:
        raise UserDoesntExist(username, self.backend_name)
    attributes = entry[1]
    result = {}
    for attr in attributes:
        values = attributes[attr]
        result[attr] = values[0] if len(values) == 1 else values
    return result
def WriteFileHash(self, path, hash_value):
    """Writes the file path and hash to stdout.

    Args:
      path (str): path of the file.
      hash_value (str): message digest hash calculated over the file data.
    """
    # Output format: "<hash>\t<path>".
    line = '{0:s}\t{1:s}'.format(hash_value, path)
    print(self._EncodeString(line))
def _get_dynamic_field_for(cls, field_name):
    """Return the dynamic field within this class that matches the given name.

    An internal cache speeds up future calls with the same field name.  The
    cache stores the field for each individual class and subclass, to keep
    the link between a field and its direct model.

    Raises ValueError if no dynamic field accepts the name.
    """
    from .fields import DynamicFieldMixin  # here to avoid circular import

    cache = ModelWithDynamicFieldMixin._dynamic_fields_cache
    if cls not in cache:
        cache[cls] = {}

    if field_name not in cache[cls]:
        # Cache the miss as None first, then overwrite with the first
        # dynamic field that accepts this name.
        cache[cls][field_name] = None
        for candidate_name in cls._fields:
            candidate = cls.get_field(candidate_name)
            if isinstance(candidate, DynamicFieldMixin) and candidate._accept_name(field_name):
                cache[cls][field_name] = candidate
                break

    field = cache[cls][field_name]
    if field is None:
        raise ValueError('No DynamicField matching "%s"' % field_name)
    return field
def _parse_nationality(self, player_info):
    """Parse the player's nationality.

    The nationality is denoted by a flag in the information section with a
    country code for each nation.  The code is pulled and matched against
    the NATIONALITY mapping; on success the '_nationality' attribute is set.

    Parameters
    ----------
    player_info : PyQuery object
        A PyQuery object containing the HTML from the player's stats page.
    """
    for span in player_info('span').items():
        # Flag icons carry a class starting with "f-i".
        if 'class="f-i' not in str(span):
            continue
        country_code = span.text()
        setattr(self, '_nationality', NATIONALITY[country_code])
        break
def parse_args():
    """Argument parser and validator.

    Returns the parsed, validated argparse namespace.
    """
    parser = argparse.ArgumentParser(
        description="Uploads specified VMDK file to AWS s3 bucket, and converts to AMI")
    parser.add_argument('-r', '--aws_regions', type=str, nargs='+', required=True,
                        help='list of AWS regions where uploaded ami should be copied. Available'
                             ' regions: {}.'.format(AWSUtilities.aws_regions))
    parser.add_argument('-a', '--aws_profile', type=str, required=True,
                        help='AWS profile name to use for aws cli commands')
    parser.add_argument('-b', '--s3_bucket', type=str, required=True,
                        help='The aws_bucket of the profile to upload and save vmdk to')
    parser.add_argument('-f', '--vmdk_upload_file', type=str, required=True,
                        help="The file to upload if executing ")
    parser.add_argument('-n', '--ami_name', type=str, required=False,
                        help='The name to give to the uploaded ami. '
                             'Defaults to the name of the file')
    # BUG FIX: the default used to be tempfile.mkdtemp(), which created (and
    # leaked) a fresh temporary directory every time this function ran, even
    # when -d was supplied.  Create the directory lazily instead.
    parser.add_argument('-d', '--directory', type=str, default=None,
                        help='Directory to save temp aws config upload files')
    args = parser.parse_args()
    if args.directory is None:
        args.directory = tempfile.mkdtemp()
    if not args.ami_name:
        args.ami_name = os.path.basename(args.vmdk_upload_file)
    validate_args(args)
    return args
def delete(identifier, files=None, formats=None, glob_pattern=None,
           cascade_delete=None, access_key=None, secret_key=None,
           verbose=None, debug=None, **kwargs):
    """Delete files from an item.

    Note: some system files, such as <itemname>_meta.xml, cannot be deleted.

    :type identifier: str
    :param identifier: The globally unique Archive.org identifier for a
        given item.
    :param files: (optional) Only return files matching the given filenames.
    :param formats: (optional) Only return files matching the given formats.
    :type glob_pattern: str
    :param glob_pattern: (optional) Only return files matching the given
        glob pattern.
    :type cascade_delete: bool
    :param cascade_delete: (optional) Also delete files derived from the
        file, and files the file was derived from.
    :type access_key: str
    :param access_key: (optional) IA-S3 access_key to use for the request.
    :type secret_key: str
    :param secret_key: (optional) IA-S3 secret_key to use for the request.
    :type verbose: bool
    :param verbose: Print actions to stdout.
    :type debug: bool
    :param debug: (optional) Set to True to print headers to stdout and
        exit without sending the delete request.
    """
    matched = get_files(identifier, files, formats, glob_pattern, **kwargs)
    # Issue one delete per matched file and collect the responses.
    return [
        f.delete(cascade_delete=cascade_delete, access_key=access_key,
                 secret_key=secret_key, verbose=verbose, debug=debug)
        for f in matched
    ]
def set_property(obj, name, value):
    """Recursively sets the value of an object (or its subobjects') property
    specified by its dotted name.

    The object can be a user defined object, map or array; the property name
    correspondingly must be an object property, map key or array index.  If
    the property does not exist or introspection fails, this method does
    nothing and throws no errors.

    :param obj: an object to write the property to.
    :param name: a name of the property to set.
    :param value: a new value for the property to set.
    """
    # Fix: compare against None with ``is`` — ``== None`` can invoke an
    # arbitrary __eq__ overload on obj.
    if obj is None or name is None:
        return

    names = name.split(".")
    # str.split never returns None; keep an emptiness guard only.
    if not names:
        return

    RecursiveObjectWriter._perform_set_property(obj, names, 0, value)
def add_journal(self, units=None, boot=None, since=None, until=None,
                lines=None, allfields=False, output=None, timeout=None,
                identifier=None, catalog=None, sizelimit=None, pred=None):
    """Collect journald logs from one or more units.

    :param units: A string, or list of strings, specifying the systemd
        units for which journal entries will be collected.
    :param boot: A string selecting a boot index using the journalctl
        syntax.  The special values 'this' and 'last' are also accepted.
    :param since: A string representation of the start time for journal
        messages.
    :param until: A string representation of the end time for journal
        messages.
    :param lines: The maximum number of lines to be collected.
    :param allfields: A bool.  Include all journal fields regardless of
        size or non-printable characters.
    :param output: A journalctl output control string, e.g. "verbose".
    :param timeout: An optional timeout in seconds.
    :param identifier: An optional message identifier.
    :param catalog: Bool.  If True, augment lines with descriptions from
        the system catalog.
    :param sizelimit: Limit to the size of output returned in MB.
        Defaults to the value of --log-size.
    :param pred: predicate gating whether the command is collected.
    """
    journal_cmd = "journalctl --no-pager "
    unit_opt = " --unit %s"
    boot_opt = " --boot %s"
    since_opt = " --since %s"
    until_opt = " --until %s"
    lines_opt = " --lines %s"
    output_opt = " --output %s"
    identifier_opt = " --identifier %s"
    catalog_opt = " --catalog"
    # Minimum journal size (MB) enforced unless all_logs is requested.
    journal_size = 100
    all_logs = self.get_option("all_logs")
    log_size = sizelimit or self.get_option("log_size")
    # all_logs disables the size limit entirely (0 = unlimited).
    log_size = max(log_size, journal_size) if not all_logs else 0
    # Accept a single unit name as a convenience.
    if isinstance(units, six.string_types):
        units = [units]
    if units:
        for unit in units:
            journal_cmd += unit_opt % unit
    if identifier:
        journal_cmd += identifier_opt % identifier
    if catalog:
        journal_cmd += catalog_opt
    if allfields:
        journal_cmd += " --all"
    if boot:
        # 'this' maps to journalctl's current boot (empty index),
        # 'last' to the previous boot (-1).
        if boot == "this":
            boot = ""
        if boot == "last":
            boot = "-1"
        journal_cmd += boot_opt % boot
    if since:
        journal_cmd += since_opt % since
    if until:
        journal_cmd += until_opt % until
    if lines:
        journal_cmd += lines_opt % lines
    if output:
        journal_cmd += output_opt % output
    self._log_debug("collecting journal: %s" % journal_cmd)
    self._add_cmd_output(journal_cmd, timeout=timeout, sizelimit=log_size,
                         pred=pred)
def can_document_member(cls, member, membername, isattr, parent):
    """Called to see if a member can be documented by this documenter.

    Only coroutine functions that the parent documenter also accepts are
    documented.
    """
    parent_ok = super().can_document_member(member, membername, isattr, parent)
    return parent_ok and iscoroutinefunction(member)
def draw_rects(self, *rects):
    """Draw some number of rectangles on the current rendering target.

    Args:
        *rects (Rect): The destination rectangles.

    Raises:
        SDLError: If an error is encountered.
    """
    count = len(rects)
    # Marshal the Python Rect wrappers into a contiguous C array.
    rect_array = ffi.new('SDL_Rect[]', count)
    for index, rect in enumerate(rects):
        rect_array[index] = rect._ptr[0]
    check_int_err(lib.SDL_RenderDrawRects(self._ptr, rect_array, count))
def get_params():
    """Parse and return the command line parameters as a CLIParams tuple."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--connect-timeout", type=float, default=10.0,
                        help="ZK connect timeout")
    parser.add_argument("--run-once", type=str, default="",
                        help="Run a command non-interactively and exit")
    parser.add_argument("--run-from-stdin", action="store_true", default=False,
                        help="Read cmds from stdin, run them and exit")
    parser.add_argument("--sync-connect", action="store_true", default=False,
                        help="Connect synchronously.")
    parser.add_argument("--readonly", action="store_true", default=False,
                        help="Enable readonly.")
    parser.add_argument("--tunnel", type=str, default=None,
                        help="Create a ssh tunnel via this host")
    parser.add_argument("--version", action="store_true", default=False,
                        help="Display version and exit.")
    parser.add_argument("hosts", nargs="*", help="ZK hosts to connect")
    opts = parser.parse_args()
    # CLIParams field order must match its declaration.
    return CLIParams(opts.connect_timeout, opts.run_once, opts.run_from_stdin,
                     opts.sync_connect, opts.hosts, opts.readonly,
                     opts.tunnel, opts.version)
def _get_image_for_different_arch(self, image, platform):
    """Get an image pinned to a digest for the requested platform.

    Workaround for the aarch64 platform: the orchestrator cannot get this
    arch from manifest lists, so the digest of an arbitrary platform is
    supplied instead so the orchestrator can fetch image metadata.  For
    standard platforms (x86_64, ppc64le, ...) the exact matching digest is
    returned.
    """
    try:
        digests = self.workflow.builder.parent_images_digests.get_image_digests(image)
    except KeyError:
        return None
    if not digests:
        return None

    platform_digest = digests.get(platform)
    if platform_digest is None:
        # No exact platform match; fall back to an arbitrary platform digest.
        platform_digest = tuple(digests.values())[0]
    return ImageName.parse(platform_digest)
def _multihop_xml(self, **kwargs):
    """Build BGP multihop XML.

    Do not use this method directly; you probably want ``multihop``.

    Args:
        rbridge_id (str): The rbridge ID of the device on which BGP will be
            configured in a VCS fabric.
        neighbor (ipaddress.ip_interface): ``ip_interface`` object
            containing the peer IP address (IPv4 or IPv6).
        count (str): Number of hops to allow (1-255).

    Returns:
        ``ElementTree``: XML for configuring BGP multihop.

    Raises:
        KeyError: if any required arg is not specified.
    """
    neighbor = kwargs.pop('neighbor')
    peer_ip = str(neighbor.ip)

    config = ET.Element('config')
    rbridge = ET.SubElement(config, 'rbridge-id',
                            xmlns='urn:brocade.com:mgmt:brocade-rbridge')
    ET.SubElement(rbridge, 'rbridge-id').text = kwargs.pop('rbridge_id')
    router = ET.SubElement(rbridge, 'router')
    bgp = ET.SubElement(router, 'router-bgp',
                        xmlns='urn:brocade.com:mgmt:brocade-bgp')
    bgp_attrs = ET.SubElement(bgp, 'router-bgp-attributes')
    nbr = ET.SubElement(bgp_attrs, 'neighbor')

    # Address container names differ between IPv4 and IPv6 peers.
    if neighbor.version == 4:
        addr = ET.SubElement(ET.SubElement(nbr, 'neighbor-ips'),
                             'neighbor-addr')
        ET.SubElement(addr, 'router-bgp-neighbor-address').text = peer_ip
    else:
        addr = ET.SubElement(ET.SubElement(nbr, 'neighbor-ipv6s'),
                             'neighbor-ipv6-addr')
        ET.SubElement(addr, 'router-bgp-neighbor-ipv6-address').text = peer_ip

    multihop = ET.SubElement(addr, 'ebgp-multihop')
    ET.SubElement(multihop, 'ebgp-multihop-count').text = kwargs.pop('count')
    return config
def _count_localizations(df):
    """Count the most likely localization for each dependent peptide.

    :param df: allPeptides.txt table.
    :return: per-group most frequent localizations, as produced by
        ``_frequent_localizations``.
    """
    grp = df.groupby(_index_columns)
    # Each 'DP AA' cell is a ';'-separated list; split and tally occurrences
    # within each group.
    counts = grp['DP AA'].apply(lambda x: count(x.str.split(';').values))
    # Name the extra index level introduced by the apply().
    counts.index = counts.index.set_names('DP AA', level=4)
    counts.name = 'DP AA count'
    # Re-group the counts and pick the most frequent localization per group.
    best_localization = counts.reset_index().groupby(_index_columns).apply(_frequent_localizations)
    return best_localization
def _match_and_pop(self, regex_pattern):
    """Pop one event from each of the event queues whose names match (in the
    sense of ``re.match``) ``regex_pattern``.

    :param regex_pattern: pattern tested against each queue name.
    :return: list of popped events; empty or non-matching queues contribute
        nothing.
    """
    results = []
    # Fix: hold the lock via a context manager — the original
    # acquire()/release() pair leaked the lock if an exception escaped the
    # loop.  (Assumes self.lock is a context-manager lock, e.g.
    # threading.Lock — TODO confirm.)
    with self.lock:
        for name in self.event_dict.keys():
            if not re.match(regex_pattern, name):
                continue
            q = self.event_dict[name]
            if q:
                try:
                    results.append(q.get(False))
                except Exception:
                    # Queue was emptied between the truthiness check and
                    # the non-blocking get(); skip it.  Narrowed from a
                    # bare except so KeyboardInterrupt/SystemExit escape.
                    pass
    return results
def rename_unzipped_folder(version):
    """Rename the unzipped Spark version folder to the release tag.

    :param version: version from release tag.
    :raises SparkInstallationError: if no unzipped Spark folder is found.
    """
    base_dir = Spark.svm_path()
    for entry in os.listdir(base_dir):
        if fnmatch.fnmatch(entry, 'apache-spark-*'):
            return os.rename(os.path.join(base_dir, entry),
                             Spark.svm_version_path(version))
    raise SparkInstallationError(
        "Unable to find unzipped Spark folder in {}".format(base_dir))
def colour(colour, message, bold=False):
    """Apply the given colour (and optional bold) to a message."""
    return style(text=message, fg=colour, bold=bold)
def _project_on_ellipsoid(c, r, locations):
    """Displace locations to the nearest point on the ellipsoid surface
    defined by center ``c`` and semi-axes ``r``."""
    centered = locations - c  # original locations relative to the center

    # Initial approximation: radial projection of each point toward the
    # ellipsoid center.
    scale = 1 / np.sum(centered ** 2 / r ** 2, axis=1, keepdims=True)
    initial = centered * np.sqrt(scale)

    def displacement(x):
        # Objective: squared distance between new and old points.
        return np.sum((x.reshape(centered.shape) - centered) ** 2)

    def on_surface(x):
        # Equality constraint: points must lie on the ellipsoid surface.
        return np.sum(x.reshape(centered.shape) ** 2 / r ** 2, axis=1) - 1

    res = sp.optimize.minimize(displacement, initial,
                               constraints={'type': 'eq', 'fun': on_surface},
                               method='SLSQP')
    return res['x'].reshape(centered.shape) + c
def update_resources(self, cpu, gpu, **kwargs):
    """EXPERIMENTAL: Updates the resource requirements.

    Should only be called when the trial is not running.

    Raises:
        ValueError: if trial status is running.
    """
    is_running = self.status is Trial.RUNNING
    if is_running:
        raise ValueError("Cannot update resources while Trial is running.")
    self.resources = Resources(cpu, gpu, **kwargs)
def affiliation_history(self):
    """List of ScopusAffiliation objects representing former affiliations of
    the author.

    Only affiliations with more than one publication are considered.
    """
    history_path = 'author-profile/affiliation-history/affiliation'
    selected_ids = []
    for node in self.xml.findall(history_path):
        if node is None:
            continue
        # More than one element under "ip-doc" indicates >1 publication.
        if len(list(node.find("ip-doc").iter())) > 1:
            selected_ids.append(node.attrib.get('affiliation-id'))
    return [ScopusAffiliation(aff_id) for aff_id in selected_ids]
def _set_cmap_seq(self, v, load=False):
    """Setter method for cmap_seq, mapped from YANG variable
    /overlay_class_map/cmap_seq (list).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_cmap_seq is considered as a private method.  Backends looking
    to populate this variable should do so via calling
    thisObj._set_cmap_seq() directly.
    """
    # Unwrap a previously wrapped value back to its underlying type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Auto-generated pyangbind wrapping: validate/coerce the value into
        # the YANG list type for cmap-seq.
        t = YANGDynClass(v, base=YANGListType("cmap_seq_num", cmap_seq.cmap_seq, yang_name="cmap-seq", rest_name="seq", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='cmap-seq-num', extensions={u'tailf-common': {u'info': u'Sequence number', u'cli-no-key-completion': None, u'alt-name': u'seq', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'OverlayClassMapRuleCallPoint'}}), is_container='list', yang_name="cmap-seq", rest_name="seq", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Sequence number', u'cli-no-key-completion': None, u'alt-name': u'seq', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'OverlayClassMapRuleCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-overlay-policy', defining_module='brocade-overlay-policy', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Incompatible value: raise with the generated diagnostic payload.
        raise ValueError({
            'error-string': """cmap_seq must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("cmap_seq_num",cmap_seq.cmap_seq, yang_name="cmap-seq", rest_name="seq", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='cmap-seq-num', extensions={u'tailf-common': {u'info': u'Sequence number', u'cli-no-key-completion': None, u'alt-name': u'seq', u'cli-suppress-list-no': None, 
u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'OverlayClassMapRuleCallPoint'}}), is_container='list', yang_name="cmap-seq", rest_name="seq", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Sequence number', u'cli-no-key-completion': None, u'alt-name': u'seq', u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-mode': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'OverlayClassMapRuleCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-overlay-policy', defining_module='brocade-overlay-policy', yang_type='list', is_config=True)""",
        })
    self.__cmap_seq = t
    # Notify the containing object, if it supports change notification.
    if hasattr(self, '_set'):
        self._set()
def search(self, start_ts, end_ts):
    """Query Solr for documents whose ``_ts`` field falls in the given
    time range, returning the streamed results."""
    range_query = '_ts: [%s TO %s]' % (start_ts, end_ts)
    return self._stream_search(range_query)
def train_batch(batch_list, context, network, gluon_trainer):
    """Train on a single batch across multiple GPUs.

    Parameters
    ----------
    batch_list : List
        list of dataset (data at index 0, labels at index 1)
    context : List
        a list of all GPUs to be used for training
    network : ResNet
    gluon_trainer : gluon train module
    """
    # Shard data and labels across the available devices.
    data = gluon.utils.split_and_load(batch_list[0], context)
    label = gluon.utils.split_and_load(batch_list[1], context)
    # Forward and backward pass on every shard.
    forward_backward(network, data, label)
    # Apply the gradient update scaled by the full batch size.
    gluon_trainer.step(batch_list[0].shape[0])
def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Hashes) -> Optional[str]
    """Check download_dir for a previously downloaded file with a correct
    hash.

    If a correct file is found return its path, else None.
    """
    candidate = os.path.join(download_dir, link.filename)
    if not os.path.exists(candidate):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', candidate)
    if hashes:
        try:
            hashes.check_against_path(candidate)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                candidate
            )
            os.unlink(candidate)
            return None
    return candidate
def to_odict(self, exclude=None):
    """Return an OrderedDict representation of the SQLAlchemy table row.

    :param exclude: optional iterable of column names to omit.
    """
    excluded = exclude if exclude is not None else tuple()
    names = [column.name for column in self.__table__.columns
             if column.name not in excluded]
    return OrderedDict((name, getattr(self, name)) for name in names)
def ListVoices(voice_spec=None):
    '''Reads the voice files from espeak-data/voices and returns a list of
    VOICE objects.

    If voice_spec is None all voices are listed; otherwise only the voices
    compatible with voice_spec are listed, in preference order.
    '''
    voices_ptr = cListVoices(voice_spec)
    result = []
    index = 0
    # The C array is NULL-terminated; dereference each entry until then.
    while voices_ptr[index]:
        result.append(voices_ptr[index][0])
        index += 1
    return result
def is_end_node(node):
    """Checks if a node is the "end" keyword.

    Args:
        node: AST node.

    Returns:
        True if the node is an expression statement consisting solely of
        the name ``end``, otherwise False.
    """
    if not isinstance(node, ast.Expr):
        return False
    value = node.value
    return isinstance(value, ast.Name) and value.id == 'end'
def _new_redis_client(self):
    """Create a new redis client from the instance's connection settings.

    :rtype: tornadoredis.Client
    """
    # BUG FIX: the original guarded the import with
    # ``if 'tornadoredis' not in globals(): import tornadoredis``.
    # Because the ``import`` statement appears in the function body,
    # ``tornadoredis`` is compiled as a *local* name; whenever the guard
    # skipped the import, the local was never bound and
    # ``tornadoredis.Client`` raised UnboundLocalError. Importing
    # unconditionally is safe and cheap: Python caches modules in
    # ``sys.modules``.
    import tornadoredis
    kwargs = self._redis_connection_settings()
    LOGGER.info('Connecting to %(host)s:%(port)s DB %(selected_db)s', kwargs)
    return tornadoredis.Client(**kwargs)
def execute(self, statement, parameters=None):
    """Execute a statement on the database.

    :param statement: a valid SQL statement
    :param parameters: a list/tuple of parameters
    :returns: this cursor

    Of Python DBAPI's five parameter styles, qmark (``?``) and numeric
    (``:1``) are supported by HANA's prepared statements; ``format``
    (``%s``) and ``pyformat`` (``%(name)s``) are handled by Python's own
    string expansion. The ``named`` style (``:name``) is not yet supported
    by this method.
    """
    self._check_closed()
    if parameters:
        # Delegate to executemany with a single row of parameters so the
        # statement is prepared once.
        self.executemany(statement, parameters=[parameters])
    else:
        # Nothing to prepare; run the statement directly.
        self._execute_direct(statement)
    return self
def flush(self):
    """Remove all cached objects from the database."""
    cached = list(self.keys())
    # Only issue a delete when there is something to remove.
    if cached:
        return self.database.delete(*cached)
def calcFontScaling(self):
    '''Calculates the current font size and left position for the current
    window.'''
    size_inches = self.figure.get_size_inches()
    dpi = self.figure.dpi
    # Window dimensions in pixels.
    self.xpx = size_inches[0] * dpi
    self.ypx = size_inches[1] * dpi
    # Font size scales with half the window height.
    self.fontSize = self.vertSize * (self.ypx / 2.0)
    xlim = self.axes.get_xlim()
    self.leftPos = xlim[0]
    self.rightPos = xlim[1]
def assign_assessment_taken_to_bank(self, assessment_taken_id, bank_id):
    """Adds an existing ``AssessmentTaken`` to a ``Bank``.

    arg:    assessment_taken_id (osid.id.Id): the ``Id`` of the
            ``AssessmentTaken``
    arg:    bank_id (osid.id.Id): the ``Id`` of the ``Bank``
    raise:  AlreadyExists - ``assessment_taken_id`` is already assigned
            to ``bank_id``
    raise:  NotFound - ``assessment_taken_id`` or ``bank_id`` not found
    raise:  NullArgument - ``assessment_taken_id`` or ``bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure occurred
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin
    manager = self._get_provider_manager('ASSESSMENT', local=True)
    bank_lookup = manager.get_bank_lookup_session(proxy=self._proxy)
    # Verify the bank exists up front; get_bank raises NotFound otherwise.
    bank_lookup.get_bank(bank_id)
    self._assign_object_to_catalog(assessment_taken_id, bank_id)
def _parse_the_ned_position_results(self, ra, dec, nedResults):
    """*parse the ned results*

    Reads a raw NED cone-search result file and extracts the matched
    object names (Python 2 code: uses ``print`` statements and the
    ``except E, e`` syntax).

    **Key Arguments:**
        - ``ra`` -- the search ra (copied into each result row)
        - ``dec`` -- the search dec (copied into each result row)
        - ``nedResults`` -- path to the NED result file; falsy to skip
          parsing entirely

    **Return:**
        - ``results`` -- list of dicts with keys ``searchRa``,
          ``searchDec`` and ``matchName``
        - ``resultLen`` -- number of raw result lines found (0 when
          nothing was parsed)
    """
    # NOTE(review): the log messages below say ``_parse_the_ned_results``
    # although this method is named ``_parse_the_ned_position_results`` --
    # confirm which name is intended.
    self.log.info('starting the ``_parse_the_ned_results`` method')
    results = []
    resultLen = 0
    if nedResults:
        # OPEN THE RESULT FILE FROM NED
        pathToReadFile = nedResults
        try:
            self.log.debug(
                "attempting to open the file %s" % (pathToReadFile,))
            readFile = codecs.open(
                pathToReadFile, encoding='utf-8', mode='rb')
            thisData = readFile.read()
            readFile.close()
        except IOError, e:
            message = 'could not open the file %s' % (pathToReadFile,)
            self.log.critical(message)
            raise IOError(message)
        # NOTE(review): the file was already closed inside the try block;
        # this second close is a harmless no-op on the success path.
        readFile.close()
        # CHECK FOR ERRORS
        if "Results from query to NASA/IPAC Extragalactic Database" not in thisData:
            print "something went wrong with the NED query"
            # NOTE(review): the format string has no placeholders, so
            # ``% locals()`` is a no-op here -- confirm intent.
            self.log.error(
                "something went wrong with the NED query" % locals())
            sys.exit(0)
        # SEARCH FROM MATCHES IN RESULTS FILE
        # Everything after the pipe-delimited header line is data.
        matchObject = re.search(
            r"No\.\|Object Name.*?\n(.*)", thisData, re.S)
        if matchObject:
            theseLines = string.split(matchObject.group(), '\n')
            resultLen = len(theseLines)
            csvReader = csv.DictReader(
                theseLines, dialect='excel', delimiter='|', quotechar='"')
            for row in csvReader:
                thisEntry = {"searchRa": ra, "searchDec": dec,
                             "matchName": row["Object Name"].strip()}
                results.append(thisEntry)
                # Stop after the first (nearest) match when requested.
                if self.nearestOnly:
                    break
    self.log.info('completed the ``_parse_the_ned_results`` method')
    return results, resultLen
def gisland(self, dae):
    """Reset g(x) for islanded buses and areas.

    Islands that contain no slack (SW) bus are merged into
    ``self.islanded_buses`` and removed from ``self.island_sets``; the
    mismatch equations of every islanded bus (rows ``a`` and ``self.n + a``
    of ``dae.g``) are then zeroed.

    :param dae: DAE container; ``dae.g`` must support fancy indexing
                (e.g. a numpy array).
    """
    if not (self.islanded_buses and self.island_sets):
        return
    # BUG FIX: iterate over a snapshot of island_sets. The original removed
    # islands from the very list being iterated, which makes the for-loop
    # skip the element following each removal, so some slack-less islands
    # were never processed.
    for island in list(self.island_sets):
        # Does any slack bus sit on this island?
        has_slack = False
        for item in self.system.SW.bus:
            if self.uid[item] in island:
                has_slack = True
                break
        if not has_slack:
            # No slack bus: every bus on this island is islanded.
            self.islanded_buses += island
            self.island_sets.remove(island)
    a = self.islanded_buses
    v = [self.n + item for item in a]
    dae.g[a] = 0
    dae.g[v] = 0
def requireCleanup(self):
    """If you intend to use any signal/slot connections on this
    QTreeWidgetItem, you will need to call the requireCleanup method and
    implement manual disconnections in the destroy method.

    QTreeWidgetItems do not inherit from QObject, and as such do not get
    the automatic connection cleanup that QObject provides.
    """
    # NOTE(review): ``tree`` is not defined in this method or its
    # arguments -- presumably this should reference the item's tree widget
    # (e.g. ``self.treeWidget()``). As written the NameError is silently
    # swallowed by the StandardError handler below; confirm the intended
    # object. (Python 2 code: StandardError does not exist in Python 3.)
    try:
        # UniqueConnection prevents duplicate connects on repeated calls.
        tree.destroyed.connect(self.destroy, QtCore.Qt.UniqueConnection)
    except StandardError:
        pass
def Parse(self, stat, file_object, knowledge_base):
    """Parse the wtmp file.

    Yields one rdf_client.User per distinct user, carrying the most
    recent login time (in microseconds).
    """
    _ = stat
    _ = knowledge_base
    last_login_by_user = {}
    data = file_object.read()
    # Consume fixed-size utmp records until the buffer is exhausted or a
    # record fails to parse.
    while data:
        try:
            record = UtmpStruct(data)
        except utils.ParsingError:
            break
        data = data[record.size:]
        # Users only appear for USER_PROCESS events, others are system.
        if record.ut_type != 7:
            continue
        # Lose the null termination
        record.user = record.user.split(b"\x00", 1)[0]
        # Store the latest login time.
        # TODO(user): remove the 0 here once RDFDatetime can support times
        # pre-epoch properly.
        if record.user in last_login_by_user:
            last_login_by_user[record.user] = max(
                last_login_by_user[record.user], record.sec, 0)
        else:
            last_login_by_user[record.user] = record.sec
    for user, last_login in iteritems(last_login_by_user):
        yield rdf_client.User(username=utils.SmartUnicode(user),
                              last_logon=last_login * 1000000)
def parse_domain(url):
    """parse the domain from the url"""
    match = lib.DOMAIN_REGEX.match(url)
    # Implicitly returns None when the URL does not match.
    return match.group() if match else None
def init_opdata(l, from_mod, version=None, is_pypy=False):
    """Sets up a number of the structures found in Python's opcode.py.

    Python opcode.py routines assign attributes to modules. In order to
    do this in a modular way here, the local dictionary ``l`` for the
    module is passed in and populated in place.
    """
    if version:
        l['python_version'] = version
    l['is_pypy'] = is_pypy
    l['cmp_op'] = cmp_op
    l['HAVE_ARGUMENT'] = HAVE_ARGUMENT
    # Python <= 3.5 used byte-oriented opcodes; 3.6+ switched to wordcode,
    # which needs its own line/label helpers.
    if version <= 3.5:
        helpers = (findlinestarts, findlabels,
                   get_jump_targets, get_jump_target_maps)
    else:
        helpers = (wordcode.findlinestarts, wordcode.findlabels,
                   wordcode.get_jump_targets, wordcode.get_jump_target_maps)
    (l['findlinestarts'], l['findlabels'],
     l['get_jump_targets'], l['get_jump_target_maps']) = helpers
    l['opmap'] = deepcopy(from_mod.opmap)
    l['opname'] = deepcopy(from_mod.opname)
    for field in fields2copy:
        l[field] = list(getattr(from_mod, field))
def laminar_entry_Baehr_Stephan(Re=None, Pr=None, L=None, Di=None):
    r'''Calculates average internal convection Nusselt number for laminar
    flows in a pipe during the thermal and velocity entry region,
    according to [1]_ as shown in [2]_.

    .. math::
        Nu_D = \frac{\frac{3.657}{\tanh[2.264 Gz_D^{-1/3} + 1.7Gz_D^{-2/3}]}
        + 0.0499Gz_D \tanh(Gz_D^{-1})}{\tanh(2.432Pr^{1/6}Gz_D^{-1/6})}

        Gz = \frac{D}{L} Re_D Pr

    Parameters
    ----------
    Re : float
        Reynolds number, [-]
    Pr : float
        Prandtl number, [-]
    L : float
        Length of pipe [m]
    Di : float
        Diameter of pipe [m]

    Returns
    -------
    Nu : float
        Nusselt number, [-]

    Notes
    -----
    As L gets larger, this equation approaches the constant-temperature
    Nusselt number.

    Examples
    --------
    >>> laminar_entry_Baehr_Stephan(Re=100000, Pr=1.1, L=5, Di=.5)
    72.65402046550976

    References
    ----------
    .. [1] Baehr, Hans Dieter, and Karl Stephan. Heat and Mass Transfer.
       Springer, 2013.
    .. [2] Bergman, Theodore L., Adrienne S. Lavine, Frank P. Incropera,
       and David P. DeWitt. Introduction to Heat Transfer. 6E. Hoboken,
       NJ: Wiley, 2011.
    '''
    # Graetz number combines geometry and flow properties.
    Gz = Di / L * Re * Pr
    numerator = (3.657 / tanh(2.264 * Gz**(-1 / 3.) + 1.7 * Gz**(-2 / 3.0))
                 + 0.0499 * Gz * tanh(1. / Gz))
    return numerator / tanh(2.432 * Pr**(1 / 6.0) * Gz**(-1 / 6.0))
def read(self, size):
    """Read ``size`` bytes starting at the current offset and advance the
    offset by ``size``.

    :param int size: length of bytes to read (an ``SV`` is unwrapped to
        its ``.value``)
    :rtype: bytearray
    """
    if isinstance(size, SV):
        size = size.value
    start = self.__idx
    self.__idx = start + size
    return self.__buff[start:start + size]
def find_clusters(struct, connected_list):
    """Finds bonded clusters of atoms in the structure with periodic
    boundary conditions.

    If there are atoms that are not bonded to anything, returns [0, 1, 0]
    (for faster computation time in FindDimension()).

    Args:
        struct (Structure): Input structure
        connected_list: Must be made from the same structure with
            FindConnected() function. An array of shape
            (number of bonded pairs, 2); each row is of the form
            [atomi, atomj].

    Returns:
        max_cluster: the size of the largest cluster in the crystal
            structure
        min_cluster: the size of the smallest cluster in the crystal
            structure
        clusters: list of bonded clusters found; clusters are formatted
            as sets of indices of atoms
    """
    n_atoms = len(struct.species)
    # Any atom missing from connected_list is bonded to nothing at all.
    if len(np.unique(connected_list)) != n_atoms:
        return [0, 1, 0]
    if n_atoms == 0:
        return [0, 0, 0]
    cluster_sizes = []
    clusters = []
    # Incrementally merge each atom's neighbourhood into the growing set
    # of clusters (a union-find style sweep over atoms).
    for atom in range(n_atoms):
        # All bond rows that mention this atom, and every atom in them.
        connected_inds = np.where(connected_list == atom)[0]
        atom_cluster = np.unique(connected_list[connected_inds])
        atom_cluster = set(atom_cluster)
        if len(clusters) == 0:
            new_clusters = [atom_cluster]
            new_cluster_sizes = [len(atom_cluster)]
        else:
            # Partition existing clusters into those touching this atom's
            # neighbourhood and those disjoint from it.
            clusters_w_atom = [atom_cluster]
            clusters_noatom = []
            clusters_noatom_sizes = []
            for cluster in clusters:
                if len(cluster.intersection(atom_cluster)) > 0:
                    clusters_w_atom.append(cluster)
                else:
                    clusters_noatom.append(cluster)
                    clusters_noatom_sizes.append(len(cluster))
            # Merge every touching cluster into one.
            if len(clusters_w_atom) > 1:
                clusters_w_atom = [set.union(*clusters_w_atom)]
            new_clusters = clusters_noatom + clusters_w_atom
            new_cluster_sizes = clusters_noatom_sizes + [len(clusters_w_atom[0])]
        clusters = list(new_clusters)
        cluster_sizes = list(new_cluster_sizes)
        # Early exit: one cluster already spans every atom.
        if n_atoms in cluster_sizes:
            break
    max_cluster = max(cluster_sizes)
    min_cluster = min(cluster_sizes)
    return [max_cluster, min_cluster, clusters]
def with_organisation(self, organisation):
    """Add an organisation segment.

    Args:
        organisation (str): Official name of an administrative body
            holding an election.

    Returns:
        IdBuilder

    Raises:
        ValueError
    """
    # Treat a missing organisation as an empty segment.
    if organisation is None:
        organisation = ''
    slug = slugify(organisation)
    self._validate_organisation(slug)
    self.organisation = slug
    return self
def get_instrument_history(self, instrument, candle_format="bidask",
                           granularity='S5', count=500,
                           daily_alignment=None, alignment_timezone=None,
                           weekly_alignment="Monday", start=None, end=None):
    """See more:
    http://developer.oanda.com/rest-live/rates/#retrieveInstrumentHistory
    """
    endpoint = "{0}/{1}/candles".format(self.domain, self.API_VERSION)
    query = {
        "accountId": self.account_id,
        "instrument": instrument,
        "candleFormat": candle_format,
        "granularity": granularity,
        "count": count,
        "dailyAlignment": daily_alignment,
        "alignmentTimezone": alignment_timezone,
        "weeklyAlignment": weekly_alignment,
        "start": start,
        "end": end,
    }
    # Any transport or response-validation failure yields False rather
    # than propagating.
    try:
        return self._Client__call(uri=endpoint, params=query, method="get")
    except (RequestException, AssertionError):
        return False
def _new_conn(self):
    """Return a fresh :class:`HTTPConnection`."""
    self.num_connections += 1
    log.debug("Starting new HTTP connection (%d): %s",
              self.num_connections, self.host)
    return self.ConnectionCls(host=self.host, port=self.port,
                              timeout=self.timeout.connect_timeout,
                              strict=self.strict, **self.conn_kw)
def datatype(dbtype, description, cursor):
    """Google AppEngine helper to convert a data type into a string.

    ``get_field_type`` may return either a plain name or a
    ``(name, params)`` tuple; only the name is returned here.
    """
    dt = cursor.db.introspection.get_field_type(dbtype, description)
    # Idiom fix: isinstance() instead of ``type(dt) is tuple`` (also
    # accepts tuple subclasses such as namedtuples).
    if isinstance(dt, tuple):
        return dt[0]
    return dt
def get_user_permissions(user):
    '''Returns the queryset of permissions for the given user.'''
    # A permission applies when the user is on a team that grants it, the
    # team is active, and the team's organization is active.
    return (
        SeedPermission.objects.all()
        .filter(seedteam__users=user)
        .filter(seedteam__archived=False)
        .filter(seedteam__organization__archived=False)
    )
def get_model_schema_and_features(model_dir):
    """Get a local model's schema and features config.

    Args:
        model_dir: local or GCS path of a model.

    Returns:
        A tuple of schema (list) and features config (dict).
    """
    assets_dir = os.path.join(model_dir, 'assets.extra')
    schema = json.loads(file_io.read_file_to_string(
        os.path.join(assets_dir, 'schema.json')))
    features_config = json.loads(file_io.read_file_to_string(
        os.path.join(assets_dir, 'features.json')))
    return schema, features_config
def _collect_box_count ( self , boxes ) : """Count the occurences of each box type ."""
count = Counter ( [ box . box_id for box in boxes ] ) # Add the counts in the superboxes . for box in boxes : if hasattr ( box , 'box' ) : count . update ( self . _collect_box_count ( box . box ) ) return count
def check_for_errors(self):
    """Check connection and channel for errors.

    :raises AMQPChannelError: Raises if the channel encountered an error.
    :raises AMQPConnectionError: Raises if the connection encountered an
        error.
    :return:
    """
    # A dead connection implies this channel is gone too; mark it closed
    # and let the connection error propagate.
    try:
        self._connection.check_for_errors()
    except AMQPConnectionError:
        self.set_state(self.CLOSED)
        raise
    if self.exceptions:
        exception = self.exceptions[0]
        # Only consume the stored exception while the channel is still
        # open; a closed channel re-raises the same error on every call.
        if self.is_open:
            self.exceptions.pop(0)
        raise exception
    if self.is_closed:
        raise AMQPChannelError('channel was closed')
def addsuffix(subject, suffix, replace=False):
    """Adds the specified *suffix* to the *subject*.

    If *replace* is True, the old suffix will be removed first. If
    *suffix* is callable, it must accept exactly one argument and return
    a modified value.
    """
    # Nothing to add and nothing to strip: return unchanged.
    if not suffix and not replace:
        return subject
    if replace:
        subject = rmvsuffix(subject)
    if not suffix:
        return subject
    if callable(suffix):
        return suffix(subject)
    return subject + suffix
def split(self, dt):
    """Split the segments in the list into subsegments of length at most
    ``dt``, storing the result in place and returning the new length.
    """
    outlist = []
    for seg in self:
        start = seg.start()
        stop = seg.end()
        # NOTE(review): shadows the ``id`` builtin within this loop body.
        id = seg.id()
        while start < stop:
            tmpstop = start + dt
            if tmpstop > stop:
                # Final piece: clamp to the segment end.
                tmpstop = stop
            elif tmpstop + dt > stop:
                # The remainder would leave a short tail; split the rest
                # roughly in half instead so both pieces are comparable.
                tmpstop = int((start + stop) / 2)
            x = ScienceSegment(tuple([id, start, tmpstop, tmpstop - start]))
            outlist.append(x)
            start = tmpstop
    # save the split list and return length
    self.__sci_segs = outlist
    return len(self)
def get(method, hmc, uri, uri_parms, logon_required):
    """Operation: List Partitions of a CPC (empty result if not in DPM
    mode).
    """
    cpc_oid = uri_parms[0]
    query_str = uri_parms[1]
    try:
        cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
    except KeyError:
        raise InvalidResourceError(method, uri)
    result_partitions = []
    # Classic-mode CPCs have no partitions; return an empty list for them.
    if cpc.dpm_enabled:
        filter_args = parse_query_parms(method, uri, query_str)
        for partition in cpc.partitions.list(filter_args):
            props = partition.properties
            # Only a summary subset of properties is reported by List.
            summary = {name: props[name] for name in props
                       if name in ('object-uri', 'name', 'status')}
            result_partitions.append(summary)
    return {'partitions': result_partitions}
def terminate_all(self):
    """Terminate all currently running tasks."""
    logger.info('Job {0} terminating all currently running tasks'.format(self.name))
    # A task counts as running when it has started but not yet completed.
    for task in self.tasks.itervalues():
        if not task.started_at:
            continue
        if task.completed_at:
            continue
        task.terminate()
def char_size(self, size):
    '''Changes font size.

    Args:
        size: change font size. Options are '24', '32', '48' for bitmap
            fonts; '33', '38', '42', '46', '50', '58', '67', '75', '83',
            '92', '100', '117', '133', '150', '167', '200', '233', '11',
            '44', '77', '111', '144' for outline fonts.
    Returns:
        None
    Raises:
        RuntimeError: Invalid font size.
        Warning: Your font is currently set to outline and you have
            selected a bitmap only font size
        Warning: Your font is currently set to bitmap and you have
            selected an outline only font size
    '''
    # Maps each valid size to the outline-flag byte sent to the printer.
    outline_flag = {'24': 0, '32': 0, '48': 0, '33': 0, '38': 0, '42': 0,
                    '46': 0, '50': 0, '58': 0, '67': 0, '75': 0, '83': 0,
                    '92': 0, '100': 0, '117': 0, '133': 0, '150': 0,
                    '167': 0, '200': 0, '233': 0, '11': 1, '44': 1,
                    '77': 1, '111': 1, '144': 1}
    if size not in outline_flag:
        raise RuntimeError('Invalid size for function charSize, choices are auto 4pt 6pt 9pt 12pt 18pt and 24pt')
    bitmap_only = size in ('24', '32', '48')
    if bitmap_only and self.fonttype != self.font_types['bitmap']:
        raise Warning('Your font is currently set to outline and you have selected a bitmap only font size')
    if not bitmap_only and self.fonttype != self.font_types['outline']:
        raise Warning('Your font is currently set to bitmap and you have selected an outline only font size')
    # ESC X 0 <size> <flag> selects the character size on the device.
    self.send(chr(27) + 'X' + chr(0) + chr(int(size)) + chr(outline_flag[size]))
def pulse_width(self):
    """Returns the current pulse width controlling the servo.

    Returns None when the underlying PWM pin has no frequency set.
    """
    pin = self.pwm_device.pin
    if pin.frequency is None:
        return None
    # Pin state is the duty cycle fraction of the frame.
    return pin.state * self.frame_width
def start(cls, settings=None):
    """RUN ME FIRST TO SETUP THE THREADED LOGGING
    http://victorlin.me/2012/08/good-logging-practice-in-python/

    log       - LIST OF PARAMETERS FOR LOGGER(S)
    trace     - SHOW MORE DETAILS IN EVERY LOG LINE (default False)
    cprofile  - True == ENABLE THE C-PROFILER THAT COMES WITH PYTHON
                (default False)
                USE THE LONG FORM TO SET THE FILENAME
                {"enabled": True, "filename": "cprofile.tab"}
    profile   - REMOVED -- requesting it raises an error (see below)
    constants - UPDATE MODULE CONSTANTS AT STARTUP
                (PRIMARILY INTENDED TO CHANGE DEBUG STATE)
    """
    global _Thread
    # No settings: nothing to configure, leave logging untouched.
    if not settings:
        return
    settings = wrap(settings)
    # Tear down any previously-configured loggers before reconfiguring.
    Log.stop()
    cls.settings = settings
    cls.trace = coalesce(settings.trace, False)
    if cls.trace:
        # Imported lazily -- presumably to avoid an import cycle with
        # mo_threads; confirm.
        from mo_threads import Thread as _Thread
        _ = _Thread
    # ENABLE CPROFILE
    if settings.cprofile is False:
        settings.cprofile = {"enabled": False}
    elif settings.cprofile is True:
        # NOTE(review): this inner isinstance() check is always True here
        # (settings.cprofile is the literal True) -- looks vestigial.
        if isinstance(settings.cprofile, bool):
            settings.cprofile = {"enabled": True, "filename": "cprofile.tab"}
    if settings.cprofile.enabled:
        from mo_threads import profiles
        profiles.enable_profilers(settings.cprofile.filename)
    # The simple profiler was removed; fail loudly if it is requested.
    if settings.profile is True or (is_data(settings.profile) and settings.profile.enabled):
        Log.error("REMOVED 2018-09-02, Activedata revision 3f30ff46f5971776f8ba18")
    if settings.constants:
        constants.set(settings.constants)
    if settings.log:
        cls.logging_multi = StructuredLogger_usingMulti()
        for log in listwrap(settings.log):
            Log.add_log(Log.new_instance(log))
        # Imported lazily -- presumably to avoid an import cycle; confirm.
        from mo_logs.log_usingThread import StructuredLogger_usingThread
        cls.main_log = StructuredLogger_usingThread(cls.logging_multi)
def wait_instances_running(ec2, instances):
    """Wait until no instance in the given iterable is 'pending'.

    Yield every instance that entered the running state as soon as it does.

    :param boto.ec2.connection.EC2Connection ec2: the EC2 connection to
        use for making requests
    :param Iterator[Instance] instances: the instances to wait on
    :rtype: Iterator[Instance]
    """
    running_ids = set()
    other_ids = set()
    while True:
        pending_ids = set()
        for i in instances:
            if i.state == 'pending':
                pending_ids.add(i.id)
            elif i.state == 'running':
                # Each instance must be yielded at most once.
                assert i.id not in running_ids
                running_ids.add(i.id)
                yield i
            else:
                # Any other state (stopped, terminated, ...) also leaves
                # 'pending', so it is yielded to the caller as well.
                assert i.id not in other_ids
                other_ids.add(i.id)
                yield i
        log.info('%i instance(s) pending, %i running, %i other.',
                 *map(len, (pending_ids, running_ids, other_ids)))
        if not pending_ids:
            break
        # Sleep time grows with the number of pending instances, clamped
        # to the range [a_short_time, 10 * a_short_time].
        seconds = max(a_short_time, min(len(pending_ids), 10 * a_short_time))
        log.info('Sleeping for %is', seconds)
        time.sleep(seconds)
        # Refresh state for only the still-pending instances, retrying
        # transient EC2 API failures.
        for attempt in retry_ec2():
            with attempt:
                instances = ec2.get_only_instances(list(pending_ids))
def get_product_trades(self, product_id, before='', after='', limit=None, result=None):
    """List the latest trades for a product.

    This method returns a generator which may make multiple HTTP requests
    while iterating through it.

    Args:
        product_id (str): Product
        before (Optional[str]): start time in ISO 8601
        after (Optional[str]): end time in ISO 8601
        limit (Optional[int]): the desired number of trades (can be more
            than 100, automatically paginated)
        result (Optional[list]): list of results that is used for the
            pagination

    Returns:
        list: Latest trades. Example::

            [{
                "time": "2014-11-07T22:19:28.578544Z",
                "trade_id": 74,
                "price": "10.00000",
                "size": "0.0100000",
                "side": "buy"
            }, {
                "time": "2014-11-07T01:08:43.642366Z",
                "trade_id": 73,
                "price": "100.00000",
                "size": "0.0100000",
                "side": "sell"
            }]
    """
    # NOTE(review): ``before``, ``after``, ``limit`` and ``result`` are
    # accepted but never forwarded to the request below, so they are
    # silently ignored -- confirm against ``_send_paginated_message``'s
    # interface whether they should be passed through.
    return self._send_paginated_message('/products/{}/trades'.format(product_id))
def _write_avg_gradient ( self ) -> None : "Writes the average of the gradients to Tensorboard ."
avg_gradient = sum ( x . data . mean ( ) for x in self . gradients ) / len ( self . gradients ) self . _add_gradient_scalar ( 'avg_gradient' , scalar_value = avg_gradient )
def cut_microsoft_quote ( html_message ) : '''Cuts splitter block and all following blocks .'''
# use EXSLT extensions to have a regex match ( ) function with lxml ns = { "re" : "http://exslt.org/regular-expressions" } # general pattern : @ style = ' border : none ; border - top : solid < color > 1.0pt ; padding : 3.0pt 0 < unit > 0 < unit > 0 < unit > ' # outlook 2007 , 2010 ( international ) < color = # B5C4DF > < unit = cm > # outlook 2007 , 2010 ( american ) < color = # B5C4DF > < unit = pt > # outlook 2013 ( international ) < color = # E1E1E1 > < unit = cm > # outlook 2013 ( american ) < color = # E1E1E1 > < unit = pt > # also handles a variant with a space after the semicolon splitter = html_message . xpath ( # outlook 2007 , 2010 , 2013 ( international , american ) "//div[@style[re:match(., 'border:none; ?border-top:solid #(E1E1E1|B5C4DF) 1.0pt; ?" "padding:3.0pt 0(in|cm) 0(in|cm) 0(in|cm)')]]|" # windows mail "//div[@style='padding-top: 5px; " "border-top-color: rgb(229, 229, 229); " "border-top-width: 1px; border-top-style: solid;']" , namespaces = ns ) if splitter : splitter = splitter [ 0 ] # outlook 2010 if splitter == splitter . getparent ( ) . getchildren ( ) [ 0 ] : splitter = splitter . getparent ( ) else : # outlook 2003 splitter = html_message . xpath ( "//div" "/div[@class='MsoNormal' and @align='center' " "and @style='text-align:center']" "/font" "/span" "/hr[@size='3' and @width='100%' and @align='center' " "and @tabindex='-1']" ) if len ( splitter ) : splitter = splitter [ 0 ] splitter = splitter . getparent ( ) . getparent ( ) splitter = splitter . getparent ( ) . getparent ( ) if len ( splitter ) : parent = splitter . getparent ( ) after_splitter = splitter . getnext ( ) while after_splitter is not None : parent . remove ( after_splitter ) after_splitter = splitter . getnext ( ) parent . remove ( splitter ) return True return False
def check_output(*args, **kwargs):
    '''Compatibility wrapper for Python 2.6 missing
    subprocess.check_output.

    Captures stderr into the returned output; raises CalledProcessError
    (with the output attached) on a non-zero exit code.
    '''
    if hasattr(subprocess, 'check_output'):
        return subprocess.check_output(stderr=subprocess.STDOUT,
                                       universal_newlines=True,
                                       *args, **kwargs)
    # Python 2.6 fallback: emulate check_output with Popen.
    process = subprocess.Popen(stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               universal_newlines=True,
                               *args, **kwargs)
    output, _ = process.communicate()
    retcode = process.poll()
    if retcode:
        error = subprocess.CalledProcessError(retcode, args[0])
        error.output = output
        raise error
    return output
def get_auth(self):
    """Returns auth response which has client token unless MFA is
    required."""
    response = get_with_retry(self.cerberus_url + '/v2/auth/user',
                              auth=(self.username, self.password),
                              headers=self.HEADERS)
    # Delegate non-200 handling to the shared error translator.
    if response.status_code != 200:
        throw_if_bad_response(response)
    return response.json()
def _get_download_table_ids ( self ) : """Get a list of PyPI downloads table ( sharded per day ) IDs . : return : list of table names ( strings ) : rtype : ` ` list ` `"""
all_table_names = [ ] # matching per - date table names logger . info ( 'Querying for all tables in dataset' ) tables = self . service . tables ( ) request = tables . list ( projectId = self . _PROJECT_ID , datasetId = self . _DATASET_ID ) while request is not None : response = request . execute ( ) # if the number of results is evenly divisible by the page size , # we may end up with a last response that has no ' tables ' key , # and is empty . if 'tables' not in response : response [ 'tables' ] = [ ] for table in response [ 'tables' ] : if table [ 'type' ] != 'TABLE' : logger . debug ( 'Skipping %s (type=%s)' , table [ 'tableReference' ] [ 'tableId' ] , table [ 'type' ] ) continue if not self . _table_re . match ( table [ 'tableReference' ] [ 'tableId' ] ) : logger . debug ( 'Skipping table with non-matching name: %s' , table [ 'tableReference' ] [ 'tableId' ] ) continue all_table_names . append ( table [ 'tableReference' ] [ 'tableId' ] ) request = tables . list_next ( previous_request = request , previous_response = response ) return sorted ( all_table_names )
def resolved(value):
    """Creates a promise object resolved with a certain value."""
    promise = Promise()
    promise._state = 'resolved'
    promise.value = value
    return promise
def _delete ( self , c , context , hm ) : """Delete a healthmonitor and ALL its pool associations"""
pools = hm . get ( "pools" , [ ] ) for pool in pools : pool_id = pool . get ( "pool_id" ) self . _dissociate ( c , context , hm , pool_id ) self . _delete_unused ( c , context , hm )
def standard_deviation(numbers):
    """Return the population standard deviation; 0 for an empty input."""
    values = list(numbers)
    if not values:
        return 0
    count = len(values)
    mean = sum(values) / count
    variance = sum((v - mean) ** 2 for v in values) / count
    return variance ** .5
def add_JSsource(self, new_src):
    """Add additional JS script source(s).

    :param new_src: a source URL string, or a list of them
    :raises OptionTypeError: when ``new_src`` is neither a list nor a
        string
    """
    if isinstance(new_src, list):
        for src in new_src:
            self.JSsource.append(src)
    elif isinstance(new_src, basestring):
        self.JSsource.append(new_src)
    else:
        # BUG FIX: the original format string had two %s placeholders but
        # was given a single non-tuple value, so building the message
        # raised TypeError instead of the intended OptionTypeError.
        raise OptionTypeError("Option: %s Not Allowed For JSsource" % type(new_src))
def addChild(self, cur):
    """Add a new node to @parent, at the end of the child (or property)
    list, merging adjacent TEXT nodes (in which case @cur is freed).

    If the new node is ATTRIBUTE, it is added into properties instead of
    children. If there is an attribute with equal name, it is first
    destroyed.
    """
    # Unwrap to the underlying libxml2 object (None stays None).
    cur__o = None if cur is None else cur._o
    ret = libxml2mod.xmlAddChild(self._o, cur__o)
    if ret is None:
        raise treeError('xmlAddChild() failed')
    return xmlNode(_obj=ret)
def timemap_stretch(y, sr, time_map, rbargs=None):
    '''Apply a timemap stretch to an audio time series.

    A timemap stretch allows non-linear time-stretching by mapping source
    to target sample frame numbers for fixed time points within the audio
    data. This uses the `time` and `timemap` form for rubberband.

    Parameters
    ----------
    y : np.ndarray [shape=(n,) or (n, c)]
        Audio time series, either single or multichannel
    sr : int > 0
        Sampling rate of `y`
    time_map : list
        Each element is a tuple `t` of length 2 which corresponds to the
        source sample position and target sample position. If
        `t[1] < t[0]` the track will be sped up in this area.
        `time_map[-1]` must correspond to the lengths of the source audio
        and target audio.
    rbargs
        Additional keyword parameters for rubberband. See `rubberband -h`
        for details.

    Returns
    -------
    y_stretch : np.ndarray
        Time-stretched audio

    Raises
    ------
    ValueError
        if `time_map` is not monotonic,
        if `time_map` is not non-negative,
        if `time_map[-1][0]` is not the input audio length
    '''
    if rbargs is None:
        rbargs = dict()
    # Validate the map: all entries non-negative, both columns
    # non-decreasing, and the last source position equal to len(y).
    is_positive = all(time_map[i][0] >= 0 and time_map[i][1] >= 0
                      for i in range(len(time_map)))
    is_monotonic = all(time_map[i][0] <= time_map[i + 1][0] and
                       time_map[i][1] <= time_map[i + 1][1]
                       for i in range(len(time_map) - 1))
    if not is_positive:
        raise ValueError('time_map should be non-negative')
    if not is_monotonic:
        raise ValueError('time_map is not monotonic')
    if time_map[-1][0] != len(y):
        raise ValueError('time_map[-1] should correspond to the last sample')
    # Overall stretch ratio used as rubberband's base --time argument.
    time_stretch = time_map[-1][1] * 1.0 / time_map[-1][0]
    rbargs.setdefault('--time', time_stretch)
    # rubberband reads the frame map from a file; write it to a temp file
    # that survives until after the subprocess runs (delete=False).
    stretch_file = tempfile.NamedTemporaryFile(mode='w', suffix='.txt',
                                               delete=False)
    try:
        for t in time_map:
            # NOTE(review): '{:0} {:1}' treats "0"/"1" as *format specs*,
            # not positional indices; for ints the output matches
            # '{0} {1}', which was probably what was intended -- confirm.
            stretch_file.write('{:0} {:1}\n'.format(t[0], t[1]))
        stretch_file.close()
        rbargs.setdefault('--timemap', stretch_file.name)
        y_stretch = __rubberband(y, sr, **rbargs)
    finally:
        # Remove temp file
        os.unlink(stretch_file.name)
    return y_stretch
def set_network ( self , machines , local_listen_port = 12400 , listen_time_out = 120 , num_machines = 1 ) :
    """Configure the network used for parallel learning.

    Parameters
    ----------
    machines : list, set or string
        Names of machines.
    local_listen_port : int, optional (default=12400)
        TCP listen port for local machines.
    listen_time_out : int, optional (default=120)
        Socket time-out in minutes.
    num_machines : int, optional (default=1)
        The number of machines for parallel learning application.

    Returns
    -------
    self : Booster
        Booster with set network.
    """
    # Build the C call arguments, then let _safe_call raise on a
    # non-zero status code from the native library.
    status = _LIB . LGBM_NetworkInit (
        c_str ( machines ) ,
        ctypes . c_int ( local_listen_port ) ,
        ctypes . c_int ( listen_time_out ) ,
        ctypes . c_int ( num_machines ) )
    _safe_call ( status )
    # Remember that networking is active so it can be torn down later.
    self . network = True
    return self
def read_transform ( filename , dimension = 2 , precision = 'float' ) :
    """Read a transform from file.

    ANTsR function: `readAntsrTransform`

    NOTE(review): the ``dimension`` argument is immediately overwritten by
    the dimension stored in the file, so the passed value is effectively
    ignored -- confirm this is intentional.

    Arguments
    ---------
    filename : string
        filename of transform
    dimension : integer
        spatial dimension of transform (overridden by the file contents)
    precision : string
        numerical precision of transform

    Returns
    -------
    ANTsTransform

    Example
    -------
    >>> import ants
    >>> tx = ants.new_ants_transform(dimension=2)
    >>> tx.set_parameters((0.9,0,0,1.1,10,11))
    >>> ants.write_transform(tx, '~/desktop/tx.mat')
    >>> tx2 = ants.read_transform('~/desktop/tx.mat')
    """
    filename = os . path . expanduser ( filename )
    if not os . path . exists ( filename ) :
        raise ValueError ( 'filename does not exist!' )

    # Read the transform's dimension and type directly from the file.
    get_dimension = utils . get_lib_fn ( 'getTransformDimensionFromFile' )
    dimension = get_dimension ( filename )
    get_name = utils . get_lib_fn ( 'getTransformNameFromFile' )
    transform_type = get_name ( filename )

    # Select the reader matching the requested precision and file dimension.
    reader_name = 'readTransform%s%i' % ( utils . short_ptype ( precision ) , dimension )
    reader = utils . get_lib_fn ( reader_name )
    itk_tx = reader ( filename , dimension , precision )
    return tio . ANTsTransform ( precision = precision ,
                                dimension = dimension ,
                                transform_type = transform_type ,
                                pointer = itk_tx )
def setup_prjs_signals ( self ) :
    """Wire up the button signals of the projects page.

    :returns: None
    :rtype: None
    :raises: None
    """
    log . debug ( "Setting up projects page signals." )
    # Each push button dispatches to its matching handler.
    self . prjs_prj_view_pb . clicked . connect ( self . prjs_view_prj )
    self . prjs_prj_create_pb . clicked . connect ( self . prjs_create_prj )
def get_tamil_words ( letters ) :
    """Return the Tamil-only words found in a letter sequence.

    Words are assembled letter-wise (grapheme clusters) rather than by raw
    unicode code points.

    :param letters: list produced by ``tamil.utf8.get_letters(...)``
    :returns: list of Tamil words
    :raises TypeError: if ``letters`` is not a list
    """
    if not isinstance ( letters , list ) :
        # BUG FIX: corrected the typo 'metehod' in the message, and raise
        # the more precise TypeError (a subclass of Exception, so any
        # existing `except Exception` handlers keep working).
        raise TypeError ( "method needs to be used with list generated from 'tamil.utf8.get_letters(...)'" )
    # list() is equivalent to the identity comprehension over the iterable.
    return list ( get_words_iterable ( letters , tamil_only = True ) )
def convolve_comb_lines ( lines_wave , lines_flux , sigma , crpix1 , crval1 , cdelt1 , naxis1 ) :
    """Convolve a set of lines of known wavelengths and flux.

    Parameters
    ----------
    lines_wave : array like
        Input array with wavelengths.
    lines_flux : array like
        Input array with fluxes.
    sigma : float
        Sigma of the broadening gaussian to be applied.
    crpix1 : float
        CRPIX1 of the desired wavelength calibration.
    crval1 : float
        CRVAL1 of the desired wavelength calibration.
    cdelt1 : float
        CDELT1 of the desired wavelength calibration.
    naxis1 : integer
        NAXIS1 of the output spectrum.

    Returns
    -------
    xwave : array like
        Array with wavelengths for the output spectrum.
    spectrum : array like
        Array with the expected fluxes at each pixel.
    """
    # Wavelength of each output pixel (FITS convention: pixels are 1-based).
    pixel_index = np . arange ( naxis1 ) + 1
    xwave = crval1 + ( pixel_index - crpix1 ) * cdelt1

    # Accumulate the broadened contribution of every line.
    spectrum = np . zeros ( naxis1 )
    for wave , flux in zip ( lines_wave , lines_flux ) :
        spectrum += gauss_box_model ( x = xwave , amplitude = flux ,
                                      mean = wave , stddev = sigma )
    return xwave , spectrum
def _convert_event_api ( lambda_logical_id , event_properties ) :
    """Convert an AWS::Serverless::Function Api Event to provider Api config.

    :param str lambda_logical_id: Logical Id of the AWS::Serverless::Function
    :param dict event_properties: Dictionary of the Event's Property
    :return tuple: tuple of API resource name and Api namedtuple
    :raises InvalidSamDocumentException: if RestApiId is neither a string
        nor a ``{"Ref": <logical id>}`` dictionary
    """
    path = event_properties . get ( SamApiProvider . _EVENT_PATH )
    method = event_properties . get ( SamApiProvider . _EVENT_METHOD )

    # RestApiId names the resource that owns this API. When omitted the
    # API belongs to the Implicit API resource. The value may be a plain
    # logical-id string or a {"Ref": <id>} intrinsic, which we resolve here.
    api_resource_id = event_properties . get ( "RestApiId" , SamApiProvider . _IMPLICIT_API_RESOURCE_ID )
    if isinstance ( api_resource_id , dict ) and "Ref" in api_resource_id :
        api_resource_id = api_resource_id [ "Ref" ]

    # Still a dictionary after resolution -> malformed template.
    if isinstance ( api_resource_id , dict ) :
        LOG . debug ( "Invalid RestApiId property of event %s" , event_properties )
        raise InvalidSamDocumentException (
            "RestApiId property of resource with logicalId '{}' is invalid. "
            "It should either be a LogicalId string or a Ref of a Logical Id string"
            . format ( lambda_logical_id ) )

    return api_resource_id , Api ( path = path , method = method , function_name = lambda_logical_id )
def execute_prepared_cql_query ( self , itemId , values ) :
    """Execute a previously prepared CQL statement.

    Sends the request over the wire and returns a Deferred that fires with
    the CqlResult once the response arrives.

    Parameters:
     - itemId: id token returned when the statement was prepared
     - values: list of variables to bind into the statement
    """
    self . _seqid += 1
    deferred = defer . Deferred ( )
    # Register the Deferred under the new sequence id so the reply
    # handler can locate and fire it later.
    self . _reqs [ self . _seqid ] = deferred
    self . send_execute_prepared_cql_query ( itemId , values )
    return deferred
def use_comparative_resource_view ( self ) :
    """Pass through to provider ResourceLookupSession.use_comparative_resource_view"""
    # Record the requested view so future sessions pick it up.
    self . _object_views [ 'resource' ] = COMPARATIVE
    # self._get_provider_session('resource_lookup_session')  # To make sure the session is tracked
    for provider_session in self . _get_provider_sessions ( ) :
        # Not every provider session supports the comparative view;
        # silently skip those that raise AttributeError.
        try :
            provider_session . use_comparative_resource_view ( )
        except AttributeError :
            pass
def _bse_cli_lookup_by_role ( args ) :
    '''Handle the lookup-by-role subcommand.

    Looks up a basis set by its role using the parsed CLI arguments.
    '''
    basis , role , data_dir = args . basis , args . role , args . data_dir
    return api . lookup_basis_by_role ( basis , role , data_dir )
def least_loaded_node ( self ) :
    """Choose the node with the fewest outstanding requests, with fallbacks.

    Prefers a node with an established connection and zero in-flight
    requests. Failing that, it picks the candidate (order shuffled for
    fairness) with the smallest in-flight count that is not "blacked out"
    (i.e., not subject to a reconnect backoff); nodes with no connection
    at all count as zero in-flight and not blacked out.

    Returns:
        node_id or None if no suitable node was found
    """
    candidates = [ broker . nodeId for broker in self . cluster . brokers ( ) ]
    random . shuffle ( candidates )

    best_node = None
    best_inflight = float ( 'inf' )
    for node_id in candidates :
        conn = self . _conns . get ( node_id )
        if conn is None :
            connected = False
            blacked_out = False
            inflight = 0
        else :
            connected = conn . connected ( )
            blacked_out = conn . blacked_out ( )
            inflight = len ( conn . in_flight_requests )

        if connected and inflight == 0 :
            # An idle, established connection is always the best choice.
            return node_id
        if not blacked_out and inflight < best_inflight :
            # Best fallback candidate seen so far.
            best_inflight = inflight
            best_node = node_id

    return best_node
async def async_set_operation_mode ( self , operation_mode : OperationMode , password : str = '' ) -> None :
    """Set the operation mode on the base unit.

    :param operation_mode: the operation mode to change to
    :param password: if specified, will be used instead of the password
        property when issuing the command
    """
    command = SetOpModeCommand ( operation_mode )
    await self . _protocol . async_execute ( command , password = password )
def query_order ( self , transaction_id = None , out_trade_no = None ) :
    """Query an order via the pay API.

    Exactly one of the two identifiers must be supplied.

    :param transaction_id: WeChat's transaction id; takes priority when given.
    :param out_trade_no: the merchant system's internal order number; used
        when ``transaction_id`` is not supplied.
    :return: the response payload
    :raises ValueError: if neither identifier is provided
    """
    if not transaction_id and not out_trade_no :
        raise ValueError ( "transaction_id and out_trade_no must be a choice." )
    payload = {
        "appid" : self . appid ,
        "mch_id" : self . mch_id ,
        "transaction_id" : transaction_id ,
        "out_trade_no" : out_trade_no ,
    }
    # NOTE(review): 'pay/paporderquery' is WeChat's entrusted-payment (papay)
    # query endpoint; the standard order query is 'pay/orderquery' -- confirm
    # this endpoint is the intended one.
    return self . _post ( "pay/paporderquery" , data = payload )
def _read_style ( style ) :
    '''Normalize a style spec (str, list, or tuple) into a list.'''
    # A bare string becomes a one-element list; any other iterable is
    # shallow-copied into a fresh list.
    if isinstance ( style , string_types ) :
        return [ style ]
    return list ( style )
def parse_msdos ( self , lines ) :
    '''Parse lines from an MS-DOS format directory listing.

    Expected line layout: ``<date> <time> <size-or-<DIR>> <filename>``.

    :param lines: iterable of listing lines
    :yields: a FileEntry for each input line
    '''
    for line in lines :
        # BUG FIX: maxsplit must be 3 (not 4) so that the 4th field
        # holds the complete filename, including any embedded spaces.
        fields = line . split ( None , 3 )
        date_str = fields [ 0 ]
        time_str = fields [ 1 ]
        datetime_str = '{} {}' . format ( date_str , time_str )
        file_datetime = self . parse_datetime ( datetime_str ) [ 0 ]
        if fields [ 2 ] == '<DIR>' :
            # Directories carry no size in MS-DOS listings.
            file_size = None
            file_type = 'dir'
        else :
            file_size = parse_int ( fields [ 2 ] )
            file_type = 'file'
        filename = fields [ 3 ]
        yield FileEntry ( filename , file_type , file_size , file_datetime )
def bulk_attachments ( self , article , attachments ) :
    """Associate already-uploaded attachments with an article.

    Implements associating attachments to an article after article
    creation (for unassociated attachments).

    :param article: Article id or :class:`Article` object
    :param attachments: a single :class:`ArticleAttachment` object, or a
        list of them (up to 20 supported).
        `Zendesk documentation.
        <https://developer.zendesk.com/rest_api/docs/help_center/articles#associate-attachments-in-bulk-to-article>`__
    :return: the API response
    """
    request = HelpdeskAttachmentRequest ( self )
    return request . post ( self . endpoint . bulk_attachments ,
                           article = article ,
                           attachments = attachments )
def box ( b , line_width = 2 , color = 'g' , style = '-' ) :
    """Draw a box on the current matplotlib plot.

    Parameters
    ----------
    b : :obj:`autolab_core.Box`
        box to draw
    line_width : int
        width of lines on side of box
    color : :obj:`str`
        color of box
    style : :obj:`str`
        style of lines to draw

    Raises
    ------
    ValueError
        if ``b`` is not a Box
    """
    if not isinstance ( b , Box ) :
        raise ValueError ( 'Input must be of type Box' )

    # Corner pixel coordinates (note the axis swap: pt[1] -> i, pt[0] -> j).
    min_i , min_j = b . min_pt [ 1 ] , b . min_pt [ 0 ]
    max_i , max_j = b . max_pt [ 1 ] , b . max_pt [ 0 ]
    top_left = np . array ( [ min_i , min_j ] )
    top_right = np . array ( [ max_i , min_j ] )
    bottom_left = np . array ( [ min_i , max_j ] )
    bottom_right = np . array ( [ max_i , max_j ] )

    # Draw the four edges in the same order as before:
    # left, right, top, bottom.
    edges = [ ( top_left , bottom_left ) ,
              ( top_right , bottom_right ) ,
              ( top_left , top_right ) ,
              ( bottom_left , bottom_right ) ]
    for start , end in edges :
        segment = np . c_ [ start , end ] . T
        plt . plot ( segment [ : , 0 ] , segment [ : , 1 ] ,
                   linewidth = line_width , color = color , linestyle = style )
def ParseShadowEntry ( self , line ) :
    """Extract one account entry from /etc/shadow.

    Records how the user's password is crypted and the password-aging
    characteristics of the account.

    Args:
      line: An entry of the shadow file.
    """
    field_names = ( "login" , "passwd" , "last_change" , "min_age" , "max_age" ,
                    "warn_time" , "inactivity" , "expire" , "reserved" )
    if not line :
        return
    entry = dict ( zip ( field_names , line . split ( ":" ) ) )
    pw_entry = self . shadow . setdefault ( entry [ "login" ] , rdf_client . PwEntry ( ) )
    pw_entry . store = self . shadow_store
    pw_entry . hash_type = self . GetHashType ( entry [ "passwd" ] )
    # Tread carefully here in case these values aren't set.
    last_change = entry . get ( "last_change" )
    if last_change :
        pw_entry . age = int ( last_change )
    max_age = entry . get ( "max_age" )
    if max_age :
        pw_entry . max_age = int ( max_age )
def lift ( fn = None , state_fn = None ) :
    """Abstract away management of the state object used as the
    intermediate representation of actions.

    :param function fn: a function producing the result of some action
        given a state value
    :param function state_fn: a function producing the new state
        (defaults to reusing the answer)
    :returns function: a function suitable for use in actions
    """
    if fn is None :
        # Called as @lift(state_fn=...): return a decorator awaiting fn.
        return partial ( lift , state_fn = state_fn )

    @ wraps ( fn )
    def _lift ( * args , ** kwargs ) :
        def _run ( state ) :
            # Prepend the state to the positional arguments.
            answer = fn ( * cons ( state , args ) , ** kwargs )
            new_state = answer if state_fn is None else state_fn ( state )
            return { 'answer' : answer , 'state' : new_state }
        return _run
    return _lift
def pointSampler ( actor , distance = None ) :
    """Generate points over an actor's surface, spaced the specified
    distance apart, and return them as a new Actor."""
    poly = actor . polydata ( True )
    if not distance :
        # Default spacing: 1% of the actor's diagonal size.
        distance = actor . diagonalSize ( ) / 100.0

    # Renamed local so it no longer shadows this function's own name.
    sampler = vtk . vtkPolyDataPointSampler ( )
    sampler . SetDistance ( distance )
    sampler . SetInputData ( poly )
    sampler . Update ( )

    sampled_actor = Actor ( sampler . GetOutput ( ) )
    prop = vtk . vtkProperty ( )
    prop . DeepCopy ( actor . GetProperty ( ) )
    sampled_actor . SetProperty ( prop )
    return sampled_actor