signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def create(cls, d):
    """Build a :class:`~pypot.primitive.move.Move` from its dict representation.

    :param dict d: mapping with a ``'framerate'`` value and a
        ``'positions'`` mapping of timed positions.
    :return: the reconstructed move instance
    """
    new_move = cls(d['framerate'])
    new_move._timed_positions.update(d['positions'])
    return new_move
def activate_eco(self):
    """Activate the eco (energy-saving) temperature preset on the device.

    NOTE(review): the original docstring said "comfort temperature", but the
    method name and the PROP_ECO property indicate the eco preset -- confirm.
    """
    # Single-byte payload selecting the eco property.
    value = struct.pack('B', PROP_ECO)
    self._conn.make_request(PROP_WRITE_HANDLE, value)
def fields_for_model(model):
    """Return the schema fields matching the provided nautilus model.

    Args:
        model (nautilus.model.BaseModel): The model to base the field list on.

    Returns:
        dict: mapping of lower-cased field name to its graphql type.
    """
    # Convert each peewee field on the model to its graphql counterpart
    # (plain attribute arguments only -- no filters).
    graphql_fields = {}
    for model_field in model.fields():
        graphql_fields[model_field.name.lower()] = convert_peewee_field(model_field)
    return graphql_fields
def color(self, key):
    """Return the color value configured for the given key for this console.

    :param key: level name, or an int logging level that is first mapped
        through ``self.LoggingMap``.
    :type key: <unicode> or int
    :return: <QtGui.QColor>
    """
    # Integer keys are logging levels; map them to their level-name string.
    if type(key) == int:
        key = self.LoggingMap.get(key, ('NotSet', ''))[0]
    # Color-set entries are stored under the capitalized level name.
    name = nativestring(key).capitalize()
    return self._colorSet.color(name)
def add_configurations(self, experiments):
    """Chain the trial generator with the given experiment specifications.

    Arguments:
        experiments (Experiment | list | dict): Experiments to run.
    """
    experiment_list = convert_to_experiment_list(experiments)
    for experiment in experiment_list:
        # Lazily append this experiment's trials to the existing generator.
        self._trial_generator = itertools.chain(
            self._trial_generator,
            self._generate_trials(experiment.spec, experiment.name))
def set_bind(self):
    """Set key bindings -- we need this more than once.

    Extends the parent's bindings so <Next> (Page Down) resets the value to 0.
    """
    IntegerEntry.set_bind(self)
    self.bind('<Next>', lambda e: self.set(0))
def detect_mbr(self, filename, offset, fs_id):
    """Match an MBR partition against the registered filesystem plugins.

    Used by rawdisk.session.Session.

    Args:
        filename: device or file that is read in order to detect the filesystem.
        offset: offset of the filesystem that is being matched.
        fs_id: filesystem id to match (e.g. 0x07).

    Returns:
        Volume object supplied by the first matching plugin, or None when
        no plugin matches.
    """
    self.logger.debug('Detecting MBR partition type')
    candidates = self.__mbr_plugins.get(fs_id)
    if not candidates:
        return None
    for candidate in candidates:
        if candidate.detect(filename, offset):
            return candidate.get_volume_object()
    return None
def save_graph_to_file(graph_file_name, module_spec, class_count):
    """Saves an graph to file, creating a valid quantized one if necessary.

    :param graph_file_name: path to write the serialized GraphDef to.
    :param module_spec: hub module spec used to build the eval session.
    :param class_count: number of output classes for the eval graph.
    """
    sess, _, _, _, _, _ = build_eval_session(module_spec, class_count)
    graph = sess.graph
    # Freeze variables into constants so the exported graph is self-contained.
    output_graph_def = tf.graph_util.convert_variables_to_constants(
        sess, graph.as_graph_def(), [FLAGS.final_tensor_name])
    with tf.gfile.GFile(graph_file_name, 'wb') as f:
        f.write(output_graph_def.SerializeToString())
def to_Message(self, result=None):
    """Convert this entrypoint into a Kafka message.

    :param kser.result.Result result: execution result; falls back to
        ``self.result`` when falsy.
    :return: Kafka message
    :rtype: kser.schemas.Message
    """
    effective_result = result if result else self.result
    return Message(
        uuid=self.uuid,
        entrypoint=self.__class__.path,
        params=self.params,
        result=effective_result,
        metadata=self.metadata,
    )
def get_qpimage(self, idx):
    """Return background-corrected QPImage of data at index `idx`."""
    if self._bgdata:
        # The user has explicitly chosen different background data
        # using `get_qpimage_raw`, so defer to the base implementation.
        qpi = super(SeriesHdf5Qpimage, self).get_qpimage(idx)
    else:
        # We can use the background data stored in the qpimage hdf5 file.
        with self._qpseries() as qps:
            qpi = qps.get_qpimage(index=idx).copy()
    # Force meta data onto the returned image.
    for key in self.meta_data:
        qpi[key] = self.meta_data[key]
    # set identifier
    qpi["identifier"] = self.get_identifier(idx)
    return qpi
def get_charm_series(path):
    """Inspect the charm directory at ``path`` and return its default series.

    The default series is the first item in the 'series' list of the charm's
    metadata.yaml.

    :param path: path to the charm directory.
    :return: the default series name, or None if no series can be determined.
    """
    md = Path(path) / "metadata.yaml"
    if not md.exists():
        return None
    # Close the file deterministically and avoid executing arbitrary YAML
    # tags from an untrusted charm archive (yaml.load without an explicit
    # Loader is unsafe and deprecated since PyYAML 5.1).
    with md.open() as fp:
        data = yaml.safe_load(fp)
    series = data.get('series')
    return series[0] if series else None
def fractional_base(fractional_part, input_base=10, output_base=10, max_depth=100):
    """Convert the fractional part of a number from any base to any base.

    Args:
        fractional_part (iterable container): The fractional part of a number
            in the following form: (".", int, int, int, ...)
        input_base (int): The base to convert from (default 10).
        output_base (int): The base to convert to (default 10).
        max_depth (int): The maximum number of digits to output.

    Returns:
        The converted fractional part as a tuple: (".", digit, digit, ...)

    Example:
        >>> fractional_base((".", 6), 10, 16, 10)
        ('.', 9, 9, 9, 9, 9, 9, 9, 9, 9, 9)
    """
    digits_in = tuple(fractional_part)[1:]  # drop the leading "."
    # Express the fraction exactly as numerator / denominator in integers.
    numerator = 0
    for i, value in enumerate(digits_in, 1):
        numerator += value * input_base ** (len(digits_in) - i)
    denominator = input_base ** len(digits_in)
    # Long division: each step multiplies the remainder by output_base once
    # and extracts one digit.  (The previous implementation multiplied BOTH
    # numerator and denominator by output_base ** i on every iteration --
    # the same digits, but with exponentially growing integers; it also
    # ended with a dead gcd reduction of values that were never returned.)
    digits = []
    for _ in range(max_depth):
        numerator *= output_base
        digit, numerator = divmod(numerator, denominator)
        digits.append(digit)
    return (".",) + tuple(digits)
def post(self, url, data=None, **kwargs):
    """Wrap ``requests.post`` so the request carries this instance's headers."""
    headers = self.add_headers(**kwargs)
    return requests.post(url, data=data, headers=headers)
def __postprocess_analyzed_result(self, string_mecab_parsed_result, is_feature, is_surface):
    # type: (text_type, bool, bool) -> List[TokenizedResult]
    """Extract surface word and feature from analyzed lines.

    Each tab-separated, non-EOS line of the mecab output is parsed into a
    TokenizedResult; results are returned as a list.
    """
    assert isinstance(string_mecab_parsed_result, str)
    # Only lines containing a tab hold a token/feature pair.
    check_tab_separated_line = lambda x: True if '\t' in x else False
    tokenized_objects = [
        self.__result_parser(analyzed_line=analyzed_line,
                             is_feature=is_feature,
                             is_surface=is_surface)
        for analyzed_line in string_mecab_parsed_result.split('\n')
        if not analyzed_line == 'EOS' and check_tab_separated_line(analyzed_line)
    ]
    assert isinstance(tokenized_objects, list)
    return tokenized_objects
def model(self, model, create=True):
    '''Return the :class:`SessionModel` for ``model``, which can be a
    :class:`Model`, a :class:`MetaClass`, or an instance of :class:`Model`.

    When no session model is cached yet and ``create`` is true, a new one
    is created and cached.
    '''
    manager = self.manager(model)
    session_model = self._models.get(manager)
    if session_model is None and create:
        session_model = SessionModel(manager)
        self._models[manager] = session_model
    return session_model
def update_account_data(self) -> None:
    """Get basic information for the account.

    Populates ``account_identifier``, ``bank_identifier``, ``iban`` and
    ``bic`` from the /accounts/{uid}/identifiers endpoint.
    """
    response = get(
        _url("/accounts/{0}/identifiers".format(self._account_uid), self._sandbox),
        headers=self._auth_headers)
    # Surface HTTP errors immediately instead of parsing an error body.
    response.raise_for_status()
    response = response.json()
    self.account_identifier = response.get('accountIdentifier')
    self.bank_identifier = response.get('bankIdentifier')
    self.iban = response.get('iban')
    self.bic = response.get('bic')
def generate_urls_for_genre(self, genre_url):
    """Generate the per-letter (and per-page) listing URLs for a genre.

    For every letter A-Z the listing page is fetched; when it is paginated
    (contains a 'list paginate' element), one URL per page is produced
    instead of the single letter URL.

    NOTE(review): uses ``xrange``, so this module targets Python 2.  Also
    ``xrange(1, n)`` stops at n-1 -- confirm whether the last page is
    intentionally excluded or accounted for by _find_num_pages.
    """
    letters = list(string.ascii_uppercase)
    urls = []
    for letter in letters:
        base = '{}&letter={}'.format(genre_url, letter)
        page = r.get(base)
        tree = html.fromstring(page.content)
        elements = tree.xpath("//ul[@class='list paginate']")
        if not elements:
            # Single page of results for this letter.
            urls.append(base)
        else:
            # Paginated: emit one URL per page.
            for i in xrange(1, self._find_num_pages(base)):
                urls.append('{}&page={}#page'.format(base, i))
    return urls
def add_callback(self, events, callback):
    """Add a new callback for the given event mask.

    :param events: bitmask of READABLE and/or WRITABLE.
    :param callback: callable invoked when a registered event fires.
    :return: a handle that can be used to remove the callback later.
    :raises RuntimeError: if the poll instance was already closed.
    :raises ValueError: if ``events`` contains bits outside the legal mask.
    """
    if self._poll is None:
        raise RuntimeError('poll instance is closed')
    if events & ~(READABLE | WRITABLE):
        raise ValueError('illegal event mask: {}'.format(events))
    # Track reader/writer counts so _sync() can adjust the poll set.
    if events & READABLE:
        self._readers += 1
    if events & WRITABLE:
        self._writers += 1
    # NOTE(review): this resolves to the *module-level* add_callback helper
    # (argument order: self, callback, events), not recursively to this
    # method -- confirm that helper exists in this module.
    handle = add_callback(self, callback, events)
    self._sync()
    return handle
def as_dict(self, replace_value_names=True):
    """Iterate, link terms and convert to a dict.

    :param replace_value_names: forwarded to the root section's as_dict.
    :return: dict representation produced by the synthetic root section.
    """
    # This function is a hack, due to confusion between the root of the document, which
    # should contain all terms, and the root section, which has only terms that are not
    # in another section. So, here we are taking the Root section, and adding all of the other
    # terms to it, as if it were also the root of the document tree.
    r = RootSectionTerm(doc=self)
    for s in self:  # Iterate over sections
        for t in s:  # Iterate over the terms in each section.
            r.terms.append(t)
    return r.as_dict(replace_value_names)
def ma_bias_ratio_pivot(self, data, sample_size=5, position=False):
    """Calculate pivot point over the trailing ``sample_size`` values.

    :param data: sequence of bias-ratio values (list expected: the window
        is sliced and searched with ``.index()``).
    :param sample_size: size of the trailing window.
    :param position: True to look for a maximum pivot, False for a minimum.
    :return: tuple (is_pivot, bars_since_pivot, pivot_value)

    NOTE(review): if ``position`` is neither True nor False (e.g. 0 or 1),
    neither branch runs and this raises UnboundLocalError -- confirm callers
    only pass real booleans.  Also the False branch tests ``max(sample) < 0``
    (i.e. the whole window negative), not ``min`` -- looks deliberate, but
    worth confirming.
    """
    sample = data[-sample_size:]
    if position is True:
        check_value = max(sample)
        pre_check_value = max(sample) > 0
    elif position is False:
        check_value = min(sample)
        pre_check_value = max(sample) < 0
    return ((sample_size - sample.index(check_value) < 4 and
             sample.index(check_value) != sample_size - 1 and
             pre_check_value),
            sample_size - sample.index(check_value) - 1,
            check_value)
def satisfiesShape(cntxt: Context, n: Node, S: ShExJ.Shape, c: DebugContext) -> bool:
    """`5.5.2 Semantics <http://shex.io/shex-semantics/#triple-expressions-semantics>`_

    For a node `n`, shape `S`, graph `G`, and shapeMap `m`, `satisfies(n, S, G, m)`
    if and only if:

    * `neigh(G, n)` can be partitioned into two sets matched and remainder such
      that `matches(matched, expression, m)`. If expression is absent,
      remainder = `neigh(G, n)`.

    :param n: focus node
    :param S: Shape to be satisfied
    :param cntxt: Evaluation context
    :param c: Debug context
    :return: true iff `satisfies(n, S, cntxt)`
    """
    # Recursion detection. If start_evaluating returns a boolean value, this is
    # the assumed result of the shape evaluation. If it returns None, then an
    # initial evaluation is needed.
    rslt = cntxt.start_evaluating(n, S)
    if rslt is None:
        cntxt.evaluate_stack.append((n, S.id))
        predicates = directed_predicates_in_expression(S, cntxt)
        matchables = RDFGraph()
        # Note: The code below does an "over-slurp" for the sake of expediency.
        # If you are interested in getting EXACTLY the needed triples, set
        # cntxt.over_slurp to false.
        if isinstance(cntxt.graph, SlurpyGraph) and cntxt.over_slurp:
            with slurper(cntxt, n, S) as g:
                _ = g.triples((n, None, None))
        # Collect candidate triples in either direction for each predicate.
        for predicate, direction in predicates.items():
            with slurper(cntxt, n, S) as g:
                matchables.add_triples(g.triples((n if direction.is_fwd else None,
                                                  iriref_to_uriref(predicate),
                                                  n if direction.is_rev else None)))
        if c.debug:
            print(c.i(1, "predicates:", sorted(cntxt.n3_mapper.n3(p) for p in predicates.keys())))
            print(c.i(1, "matchables:", sorted(cntxt.n3_mapper.n3(m) for m in matchables)))
            print()
        if S.closed:  # TODO: Is this working correctly on reverse items?
            non_matchables = RDFGraph([t for t in arcsOut(cntxt.graph, n) if t not in matchables])
            if len(non_matchables):
                cntxt.fail_reason = "Unmatched triples in CLOSED shape:"
                # NOTE(review): this second assignment *replaces* the header
                # line just set above; it looks like it was meant to append
                # (+=) -- confirm intended failure message.
                cntxt.fail_reason = '\n'.join(f"\t{t}" for t in non_matchables)
                if c.debug:
                    print(c.i(0, f"<--- Satisfies shape {c.d()} FAIL - " f"{len(non_matchables)} non-matching triples on a closed shape"))
                    print(c.i(1, "", list(non_matchables)))
                    print()
                return False
        # Evaluate the actual expression. Start assuming everything matches...
        if S.expression:
            if matches(cntxt, matchables, S.expression):
                rslt = True
            else:
                # NOTE(review): the else arm yields an empty *dict*, not an
                # empty set -- harmless for the len()/membership uses below,
                # but probably meant to be set().
                extras = {iriref_to_uriref(e) for e in S.extra} if S.extra is not None else {}
                if len(extras):
                    # Triples whose predicate is EXTRA may be excluded from the
                    # match; try every partition of them.
                    permutable_matchables = RDFGraph([t for t in matchables if t.p in extras])
                    non_permutable_matchables = RDFGraph([t for t in matchables if t not in permutable_matchables])
                    if c.debug:
                        print(c.i(1, f"Complete match failed -- evaluating extras", list(extras)))
                    for matched, remainder in partition_2(permutable_matchables):
                        permutation = non_permutable_matchables.union(matched)
                        if matches(cntxt, permutation, S.expression):
                            rslt = True
                            break
                rslt = rslt or False
        else:
            rslt = True
            # Empty shape
        # If an assumption was made and the result doesn't match the
        # assumption, switch directions and try again.
        done, consistent = cntxt.done_evaluating(n, S, rslt)
        if not done:
            # NOTE(review): this recursive call omits the ``c`` argument; it
            # only works if ``c`` has a default in the real signature -- confirm.
            rslt = satisfiesShape(cntxt, n, S)
        rslt = rslt and consistent
        cntxt.evaluate_stack.pop()
    return rslt
def get_emitter(self, event_type: str) -> Callable:
    """Return the callable that emits events of the given type.

    :raises ValueError: if ``event_type`` is not a known event.
    """
    if event_type in self.events:
        return self.events.get_emitter(event_type)
    raise ValueError(f'No event {event_type} in system.')
def _run_configure_script(self, script):
    """Run the script to install the Juju agent on the target machine.

    :param str script: The script returned by the ProvisioningScript API
    :raises: :class:`paramiko.ssh_exception.AuthenticationException`
        if the upload fails
    """
    _, tmpFile = tempfile.mkstemp()
    with open(tmpFile, 'w') as f:
        f.write(script)
    ssh = None
    try:
        # get ssh client
        ssh = self._get_ssh_client(self.host, "ubuntu", self.private_key_path)
        # copy the local copy of the script to the remote machine
        sftp = paramiko.SFTPClient.from_transport(ssh.get_transport())
        sftp.put(tmpFile, tmpFile)
        # run the provisioning script
        stdout, stderr = self._run_command(
            ssh, "sudo /bin/bash {}".format(tmpFile))
    except paramiko.ssh_exception.AuthenticationException as e:
        raise e
    finally:
        os.remove(tmpFile)
        # Guard the close: if _get_ssh_client raised, ``ssh`` was never
        # bound and the previous unconditional close() masked the original
        # error with a NameError.
        if ssh is not None:
            ssh.close()
def _validate_class_definition(meta, classname, bases, dict_):
    """Ensure the matcher class definition is acceptable.

    :param classname: name of the class being defined.
    :param bases: base classes of the new class (unused here).
    :param dict_: attribute dictionary of the class being defined.
    :raise RuntimeError: If there is a problem
    """
    # let the BaseMatcher class be created without hassle
    if meta._is_base_matcher_class_definition(classname, dict_):
        return
    # ensure that no important magic methods are being overridden
    for name, member in dict_.items():
        if not (name.startswith('__') and name.endswith('__')):
            continue
        # check if it's not a whitelisted magic method name
        name = name[2:-2]
        if not name:
            continue  # unlikely case of a ``____`` function
        # only methods BaseMatcher itself defines are protected
        if name not in meta._list_magic_methods(BaseMatcher):
            continue
        if name in meta.USER_OVERRIDABLE_MAGIC_METHODS:
            continue
        # non-function attributes, like __slots__, are harmless
        if not inspect.isfunction(member):
            continue
        # classes in this very module are exempt, since they define
        # the very behavior of matchers we want to protect
        if member.__module__ == __name__:
            continue
        raise RuntimeError(
            "matcher class %s cannot override the __%s__ method" % (classname, name))
def get_nets_jpnic(self, response):
    """The function for parsing network blocks from jpnic whois data.

    Args:
        response (:obj:`str`): The response from the jpnic server.

    Returns:
        list of dict: Mapping of networks with start and end positions.

        ::

            'cidr' (str) - The network routing block
            'start' (int) - The starting point of the network
            'end' (int) - The endpoint point of the network
    """
    nets = []
    # Iterate through all of the networks found, storing the CIDR value
    # and the start and end positions.
    for match in re.finditer(
            r'^.*?(\[Network Number\])[^\S\n]+.+?>(?P<val>.+?)</A>$',
            response,
            re.MULTILINE):
        try:
            net = copy.deepcopy(BASE_NET)
            tmp = ip_network(match.group(2))
            # Attribute names differ between the py3 ipaddress module and
            # the older ipaddr-style fallback.
            try:  # pragma: no cover
                network_address = tmp.network_address
            except AttributeError:  # pragma: no cover
                network_address = tmp.ip
                pass
            try:  # pragma: no cover
                broadcast_address = tmp.broadcast_address
            except AttributeError:  # pragma: no cover
                broadcast_address = tmp.broadcast
                pass
            net['range'] = '{0} - {1}'.format(network_address + 1, broadcast_address)
            cidr = ip_network(match.group(2).strip()).__str__()
            net['cidr'] = cidr
            net['start'] = match.start()
            net['end'] = match.end()
            nets.append(net)
        except (ValueError, TypeError):
            # Skip entries whose value is not a parseable network.
            pass
    return nets
def main():  # noqa
    """Attempt to fully destroy AWS Resources for a Spinnaker Application."""
    logging.basicConfig(format=LOGGING_FORMAT)
    parser = argparse.ArgumentParser(description=main.__doc__)
    add_debug(parser)
    add_app(parser)
    args = parser.parse_args()
    # At DEBUG, raise verbosity for the whole package; otherwise only for LOG.
    if args.debug == logging.DEBUG:
        logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
    else:
        LOG.setLevel(args.debug)
    # Tear down each resource type in every environment/region pair,
    # logging-and-continuing on the errors each destroyer is known to raise.
    for env in ENVS:
        for region in REGIONS:
            LOG.info('DESTROY %s:%s', env, region)
            try:
                destroy_dns(app=args.app, env=env)
            except botocore.exceptions.ClientError as error:
                LOG.warning('DNS issue for %s in %s: %s', env, region, error)
            try:
                destroy_elb(app=args.app, env=env, region=region)
            except SpinnakerError:
                pass
            try:
                destroy_iam(app=args.app, env=env)
            except botocore.exceptions.ClientError as error:
                LOG.warning('IAM issue for %s in %s: %s', env, region, error)
            try:
                destroy_s3(app=args.app, env=env)
            except botocore.exceptions.ClientError as error:
                LOG.warning('S3 issue for %s in %s: %s', env, region, error)
            try:
                destroy_sg(app=args.app, env=env, region=region)
            except SpinnakerError:
                pass
            LOG.info('Destroyed %s:%s', env, region)
    LOG.info('Destruction complete.')
def extract_subset(self, *constants):
    """Create a subset of entries.

    This subset is a new ``Choices`` instance, with only the wanted constants
    from the main ``Choices`` (each "choice entry" in the subset is shared
    from the main ``Choices``).

    Parameters
    ----------
    *constants : list
        The constants names of this ``Choices`` object to make available in
        the subset.

    Returns
    -------
    Choices
        The newly created subset, which is a ``Choices`` object.

    Raises
    ------
    ValueError
        If a constant is not defined as a constant in the ``Choices`` instance.

    Example
    -------
    >>> STATES = Choices(
    ...     ('ONLINE', 1, 'Online'),
    ...     ('DRAFT', 2, 'Draft'),
    ...     ('OFFLINE', 3, 'Offline'),
    ... )
    >>> STATES
    [('ONLINE', 1, 'Online'), ('DRAFT', 2, 'Draft'), ('OFFLINE', 3, 'Offline')]
    >>> subset = STATES.extract_subset('DRAFT', 'OFFLINE')
    >>> subset
    [('DRAFT', 2, 'Draft'), ('OFFLINE', 3, 'Offline')]
    >>> subset.for_constant('DRAFT') is STATES.for_constant('DRAFT')
    True
    >>> subset.ONLINE
    Traceback (most recent call last):
    ...
    AttributeError: 'Choices' object has no attribute 'ONLINE'
    """
    # Ensure that all passed constants exist as such in the list of
    # available constants.
    bad_constants = set(constants).difference(self.constants)
    if bad_constants:
        raise ValueError("All constants in subsets should be in parent choice. "
                         "Missing constants: %s." % list(bad_constants))
    # Keep only entries we asked for.
    choice_entries = [self.constants[c] for c in constants]
    # Create a new ``Choices`` instance with the limited set of entries, and
    # pass the other configuration attributes to share the same behavior as
    # the current ``Choices``.  ``mutable`` is set to False to disable the
    # possibility to add new choices to the subset.
    subset = self.__class__(*choice_entries,
                            **{'dict_class': self.dict_class, 'mutable': False, })
    return subset
def get_cached_manylinux_wheel(self, package_name, package_version, disable_progress=False):
    """Gets the locally stored version of a manylinux wheel. If one does not exist, the function downloads it.

    :param package_name: name of the package.
    :param package_version: exact version of the package.
    :param disable_progress: suppress the download progress bar when True.
    :return: path to a valid wheel file, or None when no manylinux wheel
        could be found or the download was not a valid zip.
    """
    cached_wheels_dir = os.path.join(tempfile.gettempdir(), 'cached_wheels')
    if not os.path.isdir(cached_wheels_dir):
        os.makedirs(cached_wheels_dir)
    wheel_file = '{0!s}-{1!s}-{2!s}'.format(
        package_name, package_version, self.manylinux_wheel_file_suffix)
    wheel_path = os.path.join(cached_wheels_dir, wheel_file)
    # Re-download when missing or when the cached file is not a valid zip
    # (e.g. an interrupted earlier download).
    if not os.path.exists(wheel_path) or not zipfile.is_zipfile(wheel_path):
        # The file is not cached, download it.
        wheel_url = self.get_manylinux_wheel_url(package_name, package_version)
        if not wheel_url:
            return None
        print(" - {}=={}: Downloading".format(package_name, package_version))
        with open(wheel_path, 'wb') as f:
            self.download_url_with_progress(wheel_url, f, disable_progress)
        if not zipfile.is_zipfile(wheel_path):
            return None
    else:
        print(" - {}=={}: Using locally cached manylinux wheel".format(
            package_name, package_version))
    return wheel_path
def mark(self, scope='process'):
    """Set up the profiler state to record operator.

    Parameters
    ----------
    scope : string, optional
        Indicates what scope the marker should refer to.
        Can be 'global', 'process', 'thread', 'task', and 'marker'.
        Default is `process`.
    """
    # Delegate to the MXNet C API; check_call raises on a non-zero status.
    check_call(_LIB.MXProfileSetMarker(
        self.domain.handle, c_str(self.name), c_str(scope)))
def mainloop(self):
    """The main loop.

    Validates the CLI options, optionally resolves a tracker alias for
    --reannounce, then reads every metafile given on the command line,
    applies the requested transformations (private flag, key cleaning,
    announce URL, comment/date, fast-resume data, field assignments) and
    writes the result back in place or to --output-directory, honoring
    --dry-run.
    """
    if not self.args:
        self.parser.error("No metafiles given, nothing to do!")
    if 1 < sum(bool(i) for i in (self.options.no_ssl,
                                 self.options.reannounce,
                                 self.options.reannounce_all)):
        self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!")
    # Set filter criteria for metafiles
    filter_url_prefix = None
    if self.options.reannounce:
        # <scheme>://<netloc>/<path>?<query>
        filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False)
        filter_url_prefix = urlparse.urlunsplit((
            filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', ''  # bogus pylint: disable=E1103
        ))
        self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix)
    if self.options.reannounce_all:
        self.options.reannounce = self.options.reannounce_all
    else:
        # When changing the announce URL w/o changing the domain, don't change the info hash!
        self.options.no_cross_seed = True
    # Resolve tracker alias, if URL doesn't look like an URL
    if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme:
        tracker_alias, idx = self.options.reannounce, "0"
        if '.' in tracker_alias:
            tracker_alias, idx = tracker_alias.split('.', 1)
        try:
            idx = int(idx, 10)
            _, tracker_url = config.lookup_announce_alias(tracker_alias)
            self.options.reannounce = tracker_url[idx]
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % (
                self.options.reannounce, exc))
    # go through given files
    bad = 0
    changed = 0
    for filename in self.args:
        try:
            # Read and remember current content
            metainfo = bencode.bread(filename)
            old_metainfo = bencode.bencode(metainfo)
        except (EnvironmentError, KeyError, bencode.BencodeError) as exc:
            self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc))
            bad += 1
        else:
            # Check metafile integrity
            try:
                metafile.check_meta(metainfo)
            except ValueError as exc:
                self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,))
                if not self.options.no_skip:
                    continue
            # Skip any metafiles that don't meet the pre-conditions
            if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix):
                self.LOG.warn("Skipping metafile %r no tracked by %r!" % (filename, filter_url_prefix,))
                continue
            # Keep resume info safe
            libtorrent_resume = {}
            if "libtorrent_resume" in metainfo:
                try:
                    libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"]
                except KeyError:
                    pass  # nothing to remember
                libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"])
            # Change private flag?
            if self.options.make_private and not metainfo["info"].get("private", 0):
                self.LOG.info("Setting private flag...")
                metainfo["info"]["private"] = 1
            if self.options.make_public and metainfo["info"].get("private", 0):
                self.LOG.info("Clearing private flag...")
                del metainfo["info"]["private"]
            # Remove non-standard keys?
            if self.options.clean or self.options.clean_all or self.options.clean_xseed:
                metafile.clean_meta(metainfo,
                                    including_info=not self.options.clean,
                                    logger=self.LOG.info)
            # Restore resume info?
            if self.options.clean_xseed:
                if libtorrent_resume:
                    self.LOG.info("Restoring key 'libtorrent_resume'...")
                    metainfo.setdefault("libtorrent_resume", {})
                    metainfo["libtorrent_resume"].update(libtorrent_resume)
                else:
                    self.LOG.warn("No resume information found!")
            # Clean rTorrent data?
            if self.options.clean_rtorrent:
                for key in self.RT_RESUMT_KEYS:
                    if key in metainfo:
                        self.LOG.info("Removing key %r..." % (key,))
                        del metainfo[key]
            # Change announce URL?
            if self.options.reannounce:
                metainfo['announce'] = self.options.reannounce
                if "announce-list" in metainfo:
                    del metainfo["announce-list"]
                if not self.options.no_cross_seed:
                    # Enforce unique hash per tracker
                    metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest()
            if self.options.no_ssl:
                # We're assuming here the same (default) port is used
                metainfo['announce'] = (metainfo['announce']
                                        .replace("https://", "http://")
                                        .replace(":443/", ":80/"))
            # Change comment or creation date?
            if self.options.comment is not None:
                if self.options.comment:
                    metainfo["comment"] = self.options.comment
                elif "comment" in metainfo:
                    del metainfo["comment"]
            if self.options.bump_date:
                metainfo["creation date"] = int(time.time())
            if self.options.no_date and "creation date" in metainfo:
                del metainfo["creation date"]
            # Add fast-resume data?
            if self.options.hashed:
                try:
                    metafile.add_fast_resume(metainfo,
                                             self.options.hashed.replace("{}", metainfo["info"]["name"]))
                except EnvironmentError as exc:
                    self.fatal("Error making fast-resume data (%s)" % (exc,))
                    raise
            # Set specific keys?
            metafile.assign_fields(metainfo, self.options.set)
            replace_fields(metainfo, self.options.regex)
            # Write new metafile, if changed
            new_metainfo = bencode.bencode(metainfo)
            if new_metainfo != old_metainfo:
                if self.options.output_directory:
                    filename = os.path.join(self.options.output_directory, os.path.basename(filename))
                    self.LOG.info("Writing %r..." % filename)
                    if not self.options.dry_run:
                        bencode.bwrite(filename, metainfo)
                        if "libtorrent_resume" in metainfo:
                            # Also write clean version
                            filename = filename.replace(".torrent", "-no-resume.torrent")
                            del metainfo["libtorrent_resume"]
                            self.LOG.info("Writing %r..." % filename)
                            bencode.bwrite(filename, metainfo)
                else:
                    self.LOG.info("Changing %r..." % filename)
                    if not self.options.dry_run:
                        # Write to temporary file
                        tempname = os.path.join(os.path.dirname(filename),
                                                '.' + os.path.basename(filename),)
                        self.LOG.debug("Writing %r..." % tempname)
                        bencode.bwrite(tempname, metainfo)
                        # Replace existing file
                        if os.name != "posix":
                            # cannot rename to existing target on WIN32
                            os.remove(filename)
                        try:
                            os.rename(tempname, filename)
                        except EnvironmentError as exc:
                            # TODO: Try to write directly, keeping a backup!
                            raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % (
                                tempname, filename, exc))
                changed += 1
    # Print summary
    if changed:
        self.LOG.info("%s %d metafile(s)." % (
            "Would've changed" if self.options.dry_run else "Changed", changed))
    if bad:
        self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))
def mute():
    '''Mute the volume.

    Platform dispatch: no-op on Windows (not implemented), osascript on
    macOS, amixer (optionally through the PulseAudio device) elsewhere.
    '''
    # NOTE: mute != 0 volume
    if system.get_name() == 'windows':
        # TODO: Implement volume for Windows. Looks like WinAPI is the
        # solution...
        pass
    elif system.get_name() == 'mac':
        sp.Popen(['osascript', '-e', 'set volume output muted true']).wait()
    else:
        # Linux/Unix
        if unix_is_pulseaudio_server():
            sp.Popen(['amixer', '--quiet', '-D', 'pulse', 'sset', 'Master', 'mute']).wait()
            # sset is *not* a typo
        else:
            sp.Popen(['amixer', '--quiet', 'sset', 'Master', 'mute']).wait()
def setup(app):
    """Install the plugin.

    :param app: Sphinx application context.

    NOTE(review): ``app.info`` was removed in newer Sphinx releases (use
    ``sphinx.util.logging`` instead) -- confirm the supported Sphinx range.
    """
    app.info('Initializing GitHub plugin')
    app.add_role('ghissue', ghissue_role)
    # ghissue_role handles both issues and pull requests.
    app.add_role('ghpull', ghissue_role)
    app.add_role('ghuser', ghuser_role)
    app.add_role('ghcommit', ghcommit_role)
    app.add_config_value('github_project_url', None, 'env')
    return
def delete_issue_remote_link_by_id(self, issue_key, link_id):
    """Delete a remote link of an issue.

    :param issue_key: str -- key of the issue.
    :param link_id: str -- id of the remote link to delete.
    :return: result of the DELETE request.
    """
    endpoint = 'rest/api/2/issue/{}/remotelink/{}'.format(issue_key, link_id)
    return self.delete(endpoint)
def reset(self):
    """Reset the simulation to its initial state."""
    warnings.warn("This reset method is very crude. It should work for "
                  "many simple simulations, but we make no guarantees. In "
                  "particular, if you have components that manage their "
                  "own state in any way, this might not work.")
    # Restore the population and clock captured at simulation setup.
    self.population._population = self._initial_population
    self.clock._time = self._start_time
def get_list_objects_arg_dict(self, node_type):
    """Build the keyword arguments that will be passed to listObjects().

    If ``node_type`` is a CN, add filtering to include only objects from this
    GMN instance in the ObjectList returned by CNCore.listObjects().
    """
    # Restrict query for faster debugging:
    #   "fromDate": datetime.datetime(2017, 1, 1),
    #   "toDate": datetime.datetime(2017, 1, 10),
    arg_dict = {}
    if node_type == "cn":
        arg_dict["nodeId"] = django.conf.settings.NODE_IDENTIFIER
    return arg_dict
def compute_hist(self, bins=None, density=True, folded=False, weight="duration"):
    """Computes histogram from the pitch data in Pitch object (pitch), and creates
    a Data object (pypeaks).
    :param bins: Refers to number of bins in the histogram, determines the granularity.
    If it is not set, the number of bins which gives the highest granularity is chosen
    automatically.
    :param density: defaults to True, which means the histogram will be a normalized one.
    :param folded: defaults to False. When set to True, all the octaves are folded to one.
    :param weight: It can be one of the 'duration' or 'instance'. In the latter case, make
    sure that the pitch object has the pitch values discretized."""
    # Step 1: get the right pitch values, dropping silence/invalid markers.
    assert isinstance(self.pitch_obj.pitch, np.ndarray)
    valid_pitch = self.pitch_obj.pitch
    valid_pitch = [i for i in valid_pitch if i > -10000]
    if folded:
        # BUG FIX (py3): map() returns a lazy iterator which max()/min()
        # below would exhaust; build a concrete list instead.
        valid_pitch = [int(x % 1200) for x in valid_pitch]
    # Step 2: based on the weighing scheme, compute the histogram
    if weight == "duration":
        # Step 2.1 set the number of bins (if not passed): one bin per cent
        # of pitch range gives the highest granularity.
        if not bins:
            bins = max(valid_pitch) - min(valid_pitch)
        n, bin_edges = np.histogram(valid_pitch, bins, density=density)
        bin_centers = 0.5 * (bin_edges[1:] + bin_edges[:-1])
        self.histogram = Data(bin_centers, n)
    elif weight == "instance":
        # Count each plateau onset: a sample that differs from its
        # predecessor but equals its successor starts a held note.
        counts = {}
        i = 1
        while i < len(valid_pitch) - 1:
            if (valid_pitch[i] - valid_pitch[i - 1] != 0) and (valid_pitch[i + 1] - valid_pitch[i] == 0):
                counts[valid_pitch[i]] = counts.get(valid_pitch[i], 0) + 1
            i += 1
        # BUG FIX (py3): dict.items() is a view with no .sort(); sort
        # explicitly before converting to an array. Also removed the
        # original's intermediate Data(...) built from un-normalized
        # counts, which was immediately discarded.
        n = np.array(sorted(counts.items()))
        median_diff = np.median(np.diff(n[:, 0]))
        bin_edges = [n[0, 0] - median_diff / 2]
        bin_edges.extend(median_diff / 2 + n[:, 0])
        n[:, 1] = n[:, 1] / (n[:, 1].sum() * np.diff(bin_edges))
        self.histogram = Data(n[:, 0], n[:, 1], default_smooth=False)
def save(self, f):
    """Save pickled model to file."""
    # Bundle the learned state into one tuple so it can be restored atomically.
    state = (self.perceptron.weights, self.tagdict, self.classes, self.clusters)
    return pickle.dump(state, f, protocol=pickle.HIGHEST_PROTOCOL)
def log_debug(func, *args, **kwargs):
    '''Wrap call of provided function with debug log statements.'''
    logging.debug('Starting "%s" in thread %s...', func.__name__, current_thread())
    outcome = func(*args, **kwargs)
    # Second current_thread() call kept: the log records the thread at finish time.
    logging.debug('Successfully finished "%s" in thread %s.', func.__name__, current_thread())
    return outcome
def _should_base64_decode_body ( binary_types , flask_request , lamba_response_headers , is_base_64_encoded ) :
"""Whether or not the body should be decoded from Base64 to Binary
Parameters
binary _ types list ( basestring )
Corresponds to self . binary _ types ( aka . what is parsed from SAM Template
flask _ request flask . request
Flask request
lamba _ response _ headers dict
Headers Lambda returns
is _ base _ 64 _ encoded bool
True if the body is Base64 encoded
Returns
True if the body from the request should be converted to binary , otherwise false""" | best_match_mimetype = flask_request . accept_mimetypes . best_match ( [ lamba_response_headers [ "Content-Type" ] ] )
is_best_match_in_binary_types = best_match_mimetype in binary_types or '*/*' in binary_types
return best_match_mimetype and is_best_match_in_binary_types and is_base_64_encoded |
def _foldername(self, additionalpath=""):
    """Dot decorate a folder name."""
    # Compute and cache on miss; subfolders get a leading dot.
    if not self._foldername_cache.get(additionalpath):
        folder = ".%s" % self.folder if self.is_subfolder else self.folder
        self._foldername_cache[additionalpath] = joinpath(self.base, folder, additionalpath)
    return self._foldername_cache[additionalpath]
def num_values(self):
    """Returns the number of `value<N>` attributes that will return a valid value
    for the current mode."""
    # get_attr_int returns (cached attribute handle, value); keep the handle.
    cached_attr, result = self.get_attr_int(self._num_values, 'num_values')
    self._num_values = cached_attr
    return result
def length(self):
    """Length in Feet (f)"""
    raw = self.parse['attributes'].get('Shape_Length')
    # Missing (or zero/empty) length yields None, matching the original's
    # implicit fall-through.
    if not raw:
        return None
    return round(float(raw))
def get_route_io_data_types_for_route(self, route):  # type: (ApiRoute) -> typing.Set[UserDefined]
    """Given a route, returns a set of its argument/result/error datatypes."""
    collected = set()  # type: typing.Set[UserDefined]
    for dtype in (route.arg_data_type, route.result_data_type, route.error_data_type):
        # Strip list/nullable wrappers until the underlying type is reached.
        unwrapped = dtype  # type: typing.Any
        while is_list_type(unwrapped) or is_nullable_type(unwrapped):
            unwrapped = unwrapped.data_type
        if is_composite_type(unwrapped) or is_alias(unwrapped):
            collected.add(unwrapped)
    return collected
def to_http_string(self):
    """Return the string representation of the locale compatible with the
    HTTP header ``Accept-Language`` as specified in `RFC 7231
    <https://tools.ietf.org/html/rfc7231#section-5.3.5>_`
    The Accept-Language request HTTP header advertises which languages the
    client is able to understand, and which locale variant is preferred.
    @return: a string representation of this locale compatible with HTTP
    request, i.e., a ISO 639-3 alpha-2, optionally followed by a dash
    character ``-`` and a ISO 3166-1 alpha-2 code."""
    language = self.language_code[:2]
    if self.country_code is None:
        return language
    return '%s-%s' % (language, self.country_code)
def build_app_loggers(log_level, apps, handlers=None):
    """Return a logger dict for app packages with the given log level and no
    propogation since the apps list is parsed/normalized to be the set of top-
    level apps. The optional handlers argument is provided so that this pattern
    of app loggers can be used independently of the configure_logger method
    below, if desired."""
    # Use 'default' handler provided by DEFAULT_LOGGING config if not supplied.
    if handlers is None:
        handlers = ['default']
    # The log config expects the handlers value to be a list.
    # BUG FIX: a bare handler-name string must be wrapped, not list()-ed —
    # list('console') would split it into single characters.
    if isinstance(handlers, str):
        handlers = [handlers]
    elif not isinstance(handlers, list):
        handlers = list(handlers)
    return {
        app: {'level': log_level, 'handlers': handlers, 'propagate': False}
        for app in apps
    }
def get_response(self):
    """Get a response from the chatbot and display it."""
    user_input = self.usr_input.get()
    self.usr_input.delete(0, tk.END)
    response = self.chatbot.get_response(user_input)
    # The conversation widget must be writable before inserting text.
    self.conversation['state'] = 'normal'
    transcript = "Human: " + user_input + "\n" + "ChatBot: " + str(response.text) + "\n"
    self.conversation.insert(tk.END, transcript)
    self.conversation['state'] = 'disabled'
    # NOTE(review): blocking sleep on the UI thread is preserved from the
    # original — confirm it is intentional.
    time.sleep(0.5)
def data(self, rows=None):
    """Access a batch of episodes from the memory.
    Padding elements after the length of each episode are unspecified and might
    contain old data.
    Args:
      rows: Episodes to select, defaults to all.
    Returns:
      Tuple containing a tuple of transition quantities with batch and time
      dimensions, and a batch of sequence lengths."""
    if rows is None:
        rows = tf.range(self._capacity)
    assert rows.shape.ndims == 1
    # Gather the selected rows from every buffer in the nested structure.
    episode = tools.nested.map(lambda var: tf.gather(var, rows), self._buffers)
    length = tf.gather(self._length, rows)
    return episode, length
def iteration_stats(self):
    """Construct iteration stats record tuple.

    Assembles one row of solver diagnostics: objective terms, residuals,
    penalty parameter, constraint violation, dictionary change, step size,
    any extra stats, and elapsed time.
    """
    # Elapsed time for this iteration from the named timer.
    tk = self.timer.elapsed(self.opt['IterTimer'])
    if self.xstep_itstat is None:
        # No x-step stats available yet: fill objective/residual/rho with zeros.
        objfn = (0.0,) * 3
        rsdl = (0.0,) * 2
        rho = (0.0,)
    else:
        objfn = (self.xstep_itstat.ObjFun, self.xstep_itstat.DFid, self.xstep_itstat.RegL1)
        rsdl = (self.xstep_itstat.PrimalRsdl, self.xstep_itstat.DualRsdl)
        rho = (self.xstep_itstat.Rho,)
    # Constraint violation: distance between zero-padded dictionary and G.
    cnstr = np.linalg.norm(cr.zpad(self.D, self.cri.Nv) - self.G)
    # Magnitude of the dictionary update since the previous iteration.
    dltd = np.linalg.norm(self.D - self.Dprv)
    # Field order must match the IterationStats namedtuple declared on the class.
    tpl = (self.j,) + objfn + rsdl + rho + (cnstr, dltd, self.eta) + self.itstat_extra() + (tk,)
    return type(self).IterationStats(*tpl)
def parse_auth(header):
    """Parse rfc2617 HTTP authentication header string (basic) and return
    (user, pass) tuple or None
    (c) 2014, Marcel Hellkamp"""
    try:
        # Header has the shape "Basic <base64(user:pass)>".
        method, data = header.split(None, 1)
        if method.lower() == 'basic':
            data = base64.b64decode(uniorbytes(data, bytes))
            # Split only on the first ':' so passwords may contain colons.
            user, pwd = uniorbytes(data).split(':', 1)
            return user, pwd
        # NOTE(review): a non-basic scheme falls through and returns None
        # implicitly, while a malformed header returns (None, None) below —
        # the two failure modes are inconsistent with the docstring; confirm
        # which one callers rely on before unifying.
    except (KeyError, AttributeError, ValueError):
        return (None, None)
def update_confirmation_comment(self, confirmation_comment_id, confirmation_comment_dict):
    """Updates a confirmation comment
    :param confirmation_comment_id: the confirmation comment id
    :param confirmation_comment_dict: dict
    :return: dict"""
    # Delegate to the generic PUT helper with the confirmation-comments resource.
    return self._create_put_request(
        resource=CONFIRMATION_COMMENTS,
        billomat_id=confirmation_comment_id,
        send_data=confirmation_comment_dict,
    )
def localize_file(path_or_buffer):
    '''Ensure localize target file.
    If the target file is remote, this function fetches into local storage.
    Args:
        path (str):
            File path or file like object or URL of target file.
    Returns:
        filename (str): file name in local storage
        temporary_file_flag (bool): temporary file flag'''
    path_or_buffer = _stringify_path(path_or_buffer)
    if _is_url(path_or_buffer):
        req = urlopen(path_or_buffer)
        filename = os.path.basename(req.geturl())
        # BUG FIX: compare the extension with `!=`, not the identity test
        # `is not`, which almost always evaluated True and forced the
        # pid-based name even for real .pdf URLs.
        if os.path.splitext(filename)[-1] != ".pdf":
            pid = os.getpid()
            filename = "{0}.pdf".format(pid)
        with open(filename, 'wb') as f:
            shutil.copyfileobj(req, f)
        return filename, True
    elif is_file_like(path_or_buffer):
        # File-like object: spool it to a temporary pid-named pdf.
        pid = os.getpid()
        filename = "{0}.pdf".format(pid)
        with open(filename, 'wb') as f:
            shutil.copyfileobj(path_or_buffer, f)
        return filename, True
    # File path case
    else:
        return os.path.expanduser(path_or_buffer), False
def app_trim_memory(self, pid: int or str, level: str = 'RUNNING_LOW') -> None:
    '''Trim memory.
    Args:
        level: HIDDEN | RUNNING_MODERATE | BACKGROUNDRUNNING_LOW | MODERATE | RUNNING_CRITICAL | COMPLETE'''
    _, err = self._execute('-s', self.device_sn, 'shell', 'am', 'send-trim-memory', str(pid), level)
    # Only adb output beginning with 'Error' signals a failure.
    if not err or not err.startswith('Error'):
        return
    raise ApplicationsException(err.split(':', 1)[-1].strip())
def set_actuator_control_target_send(self, time_usec, group_mlx, target_system, target_component, controls, force_mavlink1=False):
    '''Set the vehicle attitude and body angular rates.
    time_usec                 : Timestamp (micros since boot or Unix epoch) (uint64_t)
    group_mlx                 : Actuator group. The "_mlx" indicates this is a multi-instance message and a MAVLink parser should use this field to difference between instances. (uint8_t)
    target_system             : System ID (uint8_t)
    target_component          : Component ID (uint8_t)
    controls                  : Actuator controls. Normed to -1..+1 where 0 is neutral position. Throttle for single rotation direction motors is 0..1, negative range for reverse direction. Standard mapping for attitude controls (group 0): (index 0-7): roll, pitch, yaw, throttle, flaps, spoilers, airbrakes, landing gear. Load a pass-through mixer to repurpose them as generic outputs. (float)'''
    # Encode first, then hand the message to the generic send path.
    message = self.set_actuator_control_target_encode(
        time_usec, group_mlx, target_system, target_component, controls)
    return self.send(message, force_mavlink1=force_mavlink1)
def _NTU_from_P_solver(P1, R1, NTU_min, NTU_max, function, **kwargs):
    '''Private function to solve the P-NTU method backwards, given the
    function to use, the upper and lower NTU bounds for consideration,
    and the desired P1 and R1 values.'''
    # Bracket the attainable P1 range before root finding.
    P1_max = _NTU_from_P_objective(NTU_max, R1, 0, function, **kwargs)
    P1_min = _NTU_from_P_objective(NTU_min, R1, 0, function, **kwargs)
    if P1 > P1_max:
        raise ValueError('No solution possible gives such a high P1; maximum P1=%f at NTU1=%f' % (P1_max, NTU_max))
    if P1 < P1_min:
        raise ValueError('No solution possible gives such a low P1; minimum P1=%f at NTU1=%f' % (P1_min, NTU_min))
    # Plain def instead of a lambda; solvers don't support kwargs directly.
    def to_solve(NTU1):
        return _NTU_from_P_objective(NTU1, R1, P1, function, **kwargs)
    return ridder(to_solve, NTU_min, NTU_max)
def _jitter ( self , durations , event , jitter_level , seed = None ) :
"""Determine extent to jitter tied event times . Automatically called by fit if tied event times are detected""" | np . random . seed ( seed )
if jitter_level <= 0 :
raise ValueError ( "The jitter level is less than zero, please select a jitter value greater than 0" )
event_times = durations [ event != 0 ] . copy ( )
n = event_times . shape [ 0 ]
# Determining extent to jitter event times up or down
shift = np . random . uniform ( low = - 1 , high = 1 , size = n ) * jitter_level
event_times += shift
durations_jitter = event_times . align ( durations ) [ 0 ] . fillna ( durations )
# Recursive call if event times are still tied after jitter
if self . _check_for_duplicates ( durations = durations_jitter , events = event ) :
return self . _jitter ( durations = durations_jitter , event = event , jitter_level = jitter_level , seed = seed )
return durations_jitter |
def unpack_rgb_image(plane):
    """Return a correctly shaped numpy array given the image bytes."""
    assert plane.bits_per_pixel == 24, "{} != 24".format(plane.bits_per_pixel)
    dims = point.Point.build(plane.size)
    # Interpret the raw bytes as (height, width, RGB) without copying.
    pixels = np.frombuffer(plane.data, dtype=np.uint8)
    return pixels.reshape(dims.y, dims.x, 3)
def timerEvent(self, event):
    """Reimplemented to hide the widget when the hide timer fires."""
    # Ignore timer events that did not originate from the hide timer.
    if event.timerId() != self._hide_timer.timerId():
        return
    self._hide_timer.stop()
    self.hide()
def get_menu(course, current, renderer, plugin_manager, user_manager):
    """Returns the HTML of the menu used in the administration. ```current``` is the current page of section"""
    default_entries = []
    # Course settings are restricted to users with admin rights.
    if user_manager.has_admin_rights_on_course(course):
        default_entries += [("settings", "<i class='fa fa-cog fa-fw'></i> " + _("Course settings"))]
    default_entries += [("stats", "<i class='fa fa-area-chart fa-fw'></i> " + _("Stats")), ("students", "<i class='fa fa-user fa-fw'></i> " + _("Students"))]
    # LTI courses have no classroom/team management page.
    if not course.is_lti():
        default_entries += [("aggregations", "<i class='fa fa-group fa-fw'></i> " + (_("Classrooms") if course.use_classrooms() else _("Teams")))]
    default_entries += [("tasks", "<i class='fa fa-tasks fa-fw'></i> " + _("Tasks")), ("submissions", "<i class='fa fa-search fa-fw'></i> " + _("View submissions")), ("download", "<i class='fa fa-download fa-fw'></i> " + _("Download submissions"))]
    if user_manager.has_admin_rights_on_course(course):
        # WebDAV entry only when a WebDAV host is configured on the app.
        if web.ctx.app_stack[0].webdav_host:
            default_entries += [("webdav", "<i class='fa fa-folder-open fa-fw'></i> " + _("WebDAV access"))]
        default_entries += [("replay", "<i class='fa fa-refresh fa-fw'></i> " + _("Replay submissions")), ("danger", "<i class='fa fa-bomb fa-fw'></i> " + _("Danger zone"))]
    # Hook should return a tuple (link, name) where link is the relative link from the index of the course administration.
    additional_entries = [entry for entry in plugin_manager.call_hook('course_admin_menu', course=course) if entry is not None]
    return renderer.course_admin.menu(course, default_entries + additional_entries, current)
def update(self, friendly_name=values.unset, customer_name=values.unset, street=values.unset, city=values.unset, region=values.unset, postal_code=values.unset, emergency_enabled=values.unset, auto_correct_address=values.unset):
    """Update the AddressInstance
    :param unicode friendly_name: A string to describe the resource
    :param unicode customer_name: The name to associate with the address
    :param unicode street: The number and street address of the address
    :param unicode city: The city of the address
    :param unicode region: The state or region of the address
    :param unicode postal_code: The postal code of the address
    :param bool emergency_enabled: Whether to enable emergency calling on the address
    :param bool auto_correct_address: Whether we should automatically correct the address
    :returns: Updated AddressInstance
    :rtype: twilio.rest.api.v2010.account.address.AddressInstance"""
    # Forward everything to the context proxy; values.unset fields are
    # ignored server-side.
    params = {
        'friendly_name': friendly_name,
        'customer_name': customer_name,
        'street': street,
        'city': city,
        'region': region,
        'postal_code': postal_code,
        'emergency_enabled': emergency_enabled,
        'auto_correct_address': auto_correct_address,
    }
    return self._proxy.update(**params)
def compute_samples(self):
    """Return the softmax of the mixture logits ``self.pi``.

    NOTE(review): the original docstring claimed this sampled from a Normal
    distribution and computed a ``shape`` tuple from ``self.batch_size``,
    ``self.eq_samples``, ``self.iw_samples`` and ``self.num_latent`` that was
    never used; the dead local has been removed. If Normal sampling was the
    intent, this method needs a real fix rather than documentation."""
    return tf.nn.softmax(self.pi)
def send_external(self, http_verb, host, url, http_headers, chunk):
    """Used with create_upload_url to send a chunk the the possibly external object store.
    :param http_verb: str PUT or POST
    :param host: str host we are sending the chunk to
    :param url: str url to use when sending
    :param http_headers: object headers to send with the request
    :param chunk: content to send
    :return: requests.Response containing the successful result"""
    target = host + url
    if http_verb == 'PUT':
        return self.http.put(target, data=chunk, headers=http_headers)
    if http_verb == 'POST':
        return self.http.post(target, data=chunk, headers=http_headers)
    raise ValueError("Unsupported http_verb:" + http_verb)
def config_babel(app):
    "Init application with babel."
    babel.init_app(app)
    # Pick the best supported language for each request.
    def _select_locale():
        return request.accept_languages.best_match(app.config['BABEL_LANGUAGES'])
    babel.localeselector(_select_locale)
def _pre_mongod_server_start(server, options_override=None):
    """Does necessary work before starting a server
    1- An efficiency step for arbiters running with --no-journal
        * there is a lock file ==>
        * server must not have exited cleanly from last run, and does not know
          how to auto-recover (as a journalled server would)
        * however: this is an arbiter, therefore
        * there is no need to repair data files in any way ==>
        * i can rm this lockfile and start my server"""
    lock_file_path = server.get_lock_file_path()
    no_journal = (server.get_cmd_option("nojournal") or (options_override and "nojournal" in options_override))
    if (os.path.exists(lock_file_path) and server.is_arbiter_server() and no_journal):
        log_warning("WARNING: Detected a lock file ('%s') for your server '%s'"
                    "; since this server is an arbiter, there is no need for"
                    " repair or other action. Deleting mongod.lock and"
                    " proceeding..." % (lock_file_path, server.id))
        try:
            os.remove(lock_file_path)
        # BUG FIX: 'except Exception, e' is Python-2-only syntax and is a
        # SyntaxError on Python 3; use the 'as' form.
        except Exception as e:
            log_exception(e)
            raise MongoctlException("Error while trying to delete '%s'. "
                                    "Cause: %s" % (lock_file_path, e))
def matrix_multiply(m0, m1):
    """Naive triple-loop matrix product of m0 (n x k) and m1 (k x m)."""
    new_matrix = numpy.zeros((m0.shape[0], m1.shape[1]))
    # Pragma string consumed by the OMP-annotating transpiler; keep verbatim.
    "omp parallel for private ( i , j , k , r )"
    # BUG FIX: xrange is Python-2-only (NameError on Python 3); range is
    # equivalent here on both versions.
    for i in range(m0.shape[0]):
        for j in range(m1.shape[1]):
            r = 0
            for k in range(m1.shape[0]):
                r += m0[i, k] * m1[k, j]
            new_matrix[i, j] = r
    return new_matrix
def resolve_all(self, import_items):
    """Resolves a list of imports.
    Yields filenames."""
    for item in import_items:
        try:
            resolved = self.resolve_import(item)
        except ImportException as err:
            # Unresolvable modules are logged and skipped, not fatal.
            logging.info('unknown module %s', err.module_name)
        else:
            yield resolved
def windyields(self, ini, end, delta, **keyw):
    """This function returns the wind yields and ejected masses.
    X_i, E_i = data.windyields(ini, end, delta)
    Parameters
    ini : integer
        The starting cycle.
    end : integer
        The finishing cycle.
    delta : integer
        The cycle interval.
    keyw : dict
        A dict of key word arguments.
    Returns
    list
        The function returns a list of the wind yields (X_i) and
        a list of the ejected masses (E_i) in the mass units that
        were used (usually solar masses).
    Notes
    The following keywords cand also be used:
    | Keyword Argument | Default Value |
    | abund            | "iso_massf"   |
    | tmass            | "mass"        |
    | cycle            | "cycle"       |
    The keyword arguments are used when the variables within the
    input file differ in name from their default values typically
    found in an MPPNP output file. If the data table differs in
    name, use these keywords. For example, if the table for the
    abundances is called "abundances" instead of "iso_massf", then
    use abund="abundances" as a keyword argument."""
    # IDIOM FIX: replace the non-idiomatic `(key in keyw) == False` tests
    # with dict.setdefault for the three table-name defaults.
    keyw.setdefault("tmass", "mass")
    keyw.setdefault("abund", "iso_massf")
    keyw.setdefault("cycle", "cycle")
    print("Windyields() initialised. Reading files...")
    ypsinit = []
    niso = 0
    X_i = []
    E_i = []
    totalmass = []
    ypssurf = []
    cycles = []
    first = True
    # Local aliases avoid repeated attribute lookups in the read loop,
    # which speeds up the code slightly.
    wc = self._windcalc
    cycleret = self.se.cycles
    retrieve = self.se.get
    capp = cycles.extend
    tapp = totalmass.extend
    yapp = ypssurf.extend
    # Retrieve the data from the files.
    for i in range(ini, end + 1, delta):
        step = int(i)
        capp([int(cycleret[i - ini])])
        tapp([retrieve(step, keyw["tmass"])])
        yapp([retrieve(step, keyw["abund"])])
    print("Reading complete. Calculating yields and ejected masses...")
    nsteps = len(cycles) - 1
    niso = len(ypssurf[0])
    X_i = np.zeros([niso], float)
    E_i = np.zeros([niso], float)
    # Call the windyields calculator.
    X_i, E_i = wc(first, totalmass, nsteps, niso, ypssurf, ypsinit, X_i, E_i, cycles)
    return X_i, E_i
def bin(self, s):
    """Return a value as a binary string"""
    # NOTE(review): inside this method the bare name ``bin`` resolves to the
    # *builtin*, not to this method, so for s > 1 the recursion collapses to
    # builtin bin(s) and the result carries a '0b' prefix, while s <= 1
    # returns str(s) with no prefix. The prefix inconsistency may be
    # unintended -- confirm against callers before changing.
    return str(s) if s <= 1 else bin(s >> 1) + str(s & 1)
def _get_result_color ( self , time_taken ) :
"""Get time taken result color .""" | time_taken_ms = time_taken * 1000
if time_taken_ms <= self . timer_ok :
color = 'green'
elif time_taken_ms <= self . timer_warning :
color = 'yellow'
else :
color = 'red'
return color |
def __search_iterable(self, obj, item, parent="root", parents_ids=frozenset({})):
    """Search iterables except dictionaries, sets and strings."""
    for i, thing in enumerate(obj):
        # Path of this element, e.g. "root[3]".
        new_parent = "%s[%s]" % (parent, i)
        if self.__skip_this(thing, parent=new_parent):
            continue
        # Case-fold only strings, and only when searching case-insensitively.
        if self.case_sensitive or not isinstance(thing, strings):
            thing_cased = thing
        else:
            thing_cased = thing.lower()
        if thing_cased == item:
            self.__report(report_key='matched_values', key=new_parent, value=thing)
        else:
            item_id = id(thing)
            # Guard against infinite recursion through self-referencing containers.
            if parents_ids and item_id in parents_ids:
                continue
            parents_ids_added = add_to_frozen_set(parents_ids, item_id)
            self.__search(thing, item, "%s[%s]" % (parent, i), parents_ids_added)
def get_probs(self, x, **kwargs):
    """:param x: A symbolic representation (Tensor) of the network input
    :return: A symbolic representation (Tensor) of the output
    probabilities (i.e., the output values produced by the softmax layer)."""
    d = self.fprop(x, **kwargs)
    if self.O_PROBS in d:
        output = d[self.O_PROBS]
        # Sanity-check that the declared probabilities really lie in [0, 1];
        # the control_dependencies block forces the asserts to run before
        # the value is used.
        min_prob = tf.reduce_min(output)
        max_prob = tf.reduce_max(output)
        asserts = [utils_tf.assert_greater_equal(min_prob, tf.cast(0., min_prob.dtype)), utils_tf.assert_less_equal(max_prob, tf.cast(1., min_prob.dtype))]
        with tf.control_dependencies(asserts):
            output = tf.identity(output)
        return output
    elif self.O_LOGITS in d:
        # Fall back to deriving probabilities from the logits.
        return tf.nn.softmax(logits=d[self.O_LOGITS])
    else:
        raise ValueError('Cannot find probs or logits.')
def _cache_form_details(self, form):
    """Caches some form details to lates process and validate incoming (response) form data
    Args:
        form: form dict"""
    cache = FormCache()
    model = form['model']
    # Stamp the form identity before recording the key list, so the cached
    # keys include form_key and form_name.
    model['form_key'] = cache.form_id
    model['form_name'] = self.__class__.__name__
    # In Python 3, dictionary keys are not serializable
    cache.set({'model': list(model.keys()), 'non_data_fields': self.non_data_fields})
def probe_async(self, callback):
    """Send advertisements for all connected devices.
    Args:
        callback (callable): A callback for when the probe operation has completed.
            callback should have signature callback(adapter_id, success, failure_reason) where:
                success: bool
                failure_reason: None if success is True, otherwise a reason for why we could not probe"""
    # Completion handler invoked by the control thread once FIND_CONTROL finishes.
    def _on_finished(_name, control_info, exception):
        if exception is not None:
            callback(self.id, False, str(exception))
            return
        self._control_info = control_info
        try:
            info = {'connection_string': "direct", 'uuid': control_info.uuid, 'signal_strength': 100}
            self._trigger_callback('on_scan', self.id, info, self.ExpirationTime)
        finally:
            # Always report success once control info was obtained, even if
            # the scan callback itself raises.
            callback(self.id, True, None)
    # Kick off the asynchronous search for the control block in device RAM.
    self._control_thread.command(JLinkControlThread.FIND_CONTROL, _on_finished, self._device_info.ram_start, self._device_info.ram_size)
def do_print_aldb(self, args):
    """Print the All-Link database for a device.
    Usage:
        print_aldb address|plm|all
    Arguments:
        address: INSTEON address of the device
        plm: Print the All-Link database for the PLM
        all: Print the All-Link database for all devices
    This method requires that the device ALDB has been loaded.
    To load the device ALDB use the command:
        load_aldb address|plm|all"""
    params = args.split()
    addr = None
    try:
        addr = params[0]
    except IndexError:
        _LOGGING.error('Device address required.')
        self.do_help('print_aldb')
    if addr:
        lowered = addr.lower()
        if lowered == 'all':
            self.tools.print_all_aldb()
        elif lowered == 'plm':
            # Resolve the PLM's own address, then print its database.
            self.tools.print_device_aldb(self.tools.plm.address.id)
        else:
            self.tools.print_device_aldb(addr)
def execute_paged_query(self, verb, verb_arguments):
    """Executes query (ex. list) via a dedicated http object.
    Args:
        verb (str): Method to execute on the component (ex. get, list).
        verb_arguments (dict): key-value pairs to be passed to _BuildRequest.
    Yields:
        dict: Service Response.
    Raises:
        PaginationNotSupportedError: When an API does not support paging."""
    if not self.supports_pagination(verb=verb):
        # BUG FIX: the original raised with the literal placeholder
        # '{} does not support pagination' — .format(verb) was never applied.
        raise PaginationNotSupported('{} does not support pagination'.format(verb))
    request = self._build_request(verb, verb_arguments)
    number_of_pages_processed = 0
    # Keep requesting pages until the API stops returning a next request.
    while request is not None:
        response = self._execute(request)
        number_of_pages_processed += 1
        log.debug('Executing paged request #%s', number_of_pages_processed)
        request = self._build_next_request(verb, request, response)
        yield response
def _get_socket_paths ( self ) :
"""Return a sequence of paths to sockets for communicating
with ceph daemons .""" | socket_pattern = os . path . join ( self . config [ 'socket_path' ] , ( self . config [ 'socket_prefix' ] + '*.' + self . config [ 'socket_ext' ] ) )
return glob . glob ( socket_pattern ) |
def get_real_filter(layers, img_size):
    """Get the real filter sizes of each layer involved in
    convoluation. See Xudong Cao:
    https://www.kaggle.com/c/datasciencebowl/forums/t/13166/happy-lantern-festival-report-and-code
    This does not yet take into consideration feature pooling,
    padding, striding and similar gimmicks."""
    real_filter = np.zeros((len(layers), 2))
    conv_mode = True
    # Per-axis expansion factor accumulated from pooling layers.
    expon = np.ones((1, 2))
    for i, layer in enumerate(layers[1:]):
        j = i + 1
        if not conv_mode:
            # Past the convolutional stack: dense layers "see" the whole image.
            real_filter[j] = img_size
            continue
        if is_conv2d(layer):
            # DEAD-CODE FIX: the original duplicated this exact statement in
            # both arms of a first_conv_layer flag that had no other effect;
            # the branches are merged and the flag removed.
            real_filter[j] = np.array(layer.filter_size) * expon
        elif is_maxpool2d(layer):
            # Pooling keeps the previous receptive field but scales the
            # expansion factor for subsequent conv layers.
            real_filter[j] = real_filter[i]
            expon *= np.array(layer.pool_size)
        else:
            # First non-conv/non-pool layer ends conv mode.
            conv_mode = False
            real_filter[j] = img_size
    real_filter[0] = img_size
    return real_filter
def format_line(data, linestyle):
    """Formats a list of elements using the given line style"""
    joined = linestyle.sep.join(data)
    return linestyle.begin + joined + linestyle.end
def do_GET(self):
    """Accepts GET requests to http://localhost:6500/, and stores the query
    params in the global dict. If shutdown_on_request is true, stop the
    server after the first successful request.
    The http request may contain the following query params:
    - state: unique identifier, should match what we passed to reddit
    - code: code that can be exchanged for a refresh token
    - error: if provided, the OAuth error that occurred"""
    parsed_path = urlparse(self.path)
    # Only the root path is served; anything else is a 404.
    if parsed_path.path != '/':
        self.send_error(404)
    qs = parse_qs(parsed_path.query)
    # parse_qs returns lists; keep only the first value of each param,
    # defaulting to None when absent.
    self.params['state'] = qs['state'][0] if 'state' in qs else None
    self.params['code'] = qs['code'][0] if 'code' in qs else None
    self.params['error'] = qs['error'][0] if 'error' in qs else None
    body = self.build_body()
    # send_response also sets the Server and Date headers
    self.send_response(200)
    self.send_header('Content-Type', 'text/html; charset=UTF-8')
    self.send_header('Content-Length', len(body))
    self.end_headers()
    self.wfile.write(body)
    if self.shutdown_on_request:  # Shutdown the server after serving the request
        # shutdown() must run on another thread or it deadlocks:
        # http://stackoverflow.com/a/22533929
        thread = threading.Thread(target=self.server.shutdown)
        thread.daemon = True
        thread.start()
def search_form(context, search_model_names=None):
    """Includes the search form with a list of models to use as choices
    for filtering the search by. Models should be a string with models
    in the format ``app_label.model_name`` separated by spaces. The
    string ``all`` can also be used, in which case the models defined
    by the ``SEARCH_MODEL_CHOICES`` setting will be used."""
    template_vars = {
        "request": context["request"],
    }
    # Normalize search_model_names into a list of "app_label.model_name" strings.
    if not search_model_names or not settings.SEARCH_MODEL_CHOICES:
        search_model_names = []
    elif search_model_names == "all":
        search_model_names = list(settings.SEARCH_MODEL_CHOICES)
    else:
        search_model_names = search_model_names.split(" ")
    search_model_choices = []
    for model_name in search_model_names:
        try:
            model = apps.get_model(*model_name.split(".", 1))
        except LookupError:
            # Silently skip names that don't resolve to an installed model.
            pass
        else:
            verbose_name = model._meta.verbose_name_plural.capitalize()
            search_model_choices.append((verbose_name, model_name))
    template_vars["search_model_choices"] = sorted(search_model_choices)
    return template_vars
def _compute_average_correct(input_, labels, per_example_weights, topk=1):
    """Returns the numerator and denominator of classifier accuracy."""
    # Convert one-hot labels to sparse class indices before delegating.
    sparse_labels = tf.reshape(tf.argmax(labels, 1), [-1, 1])
    return _compute_sparse_average_correct(input_, sparse_labels, per_example_weights, topk=topk)
def create_event(self, last_state, state, clean_server_name, replset_name):
    """Create an event with a message describing the replication
    state of a mongo node."""
    status = self.get_state_description(state)
    short_status = self.get_state_name(state)
    previous_short_status = self.get_state_name(last_state)
    hostname = self.hostname_for_event(clean_server_name)

    msg_title = "%s is %s for %s" % (hostname, short_status, replset_name)
    msg = (
        "MongoDB %s (%s) just reported as %s (%s) for %s; it was %s before."
        % (hostname, clean_server_name, status, short_status,
           replset_name, previous_short_status)
    )

    self.event({
        'timestamp': int(time.time()),
        'source_type_name': self.SOURCE_TYPE_NAME,
        'msg_title': msg_title,
        'msg_text': msg,
        'host': hostname,
        'tags': [
            'action:mongo_replset_member_status_change',
            'member_status:' + short_status,
            'previous_member_status:' + previous_short_status,
            'replset:' + replset_name,
        ],
    })
def pack(self):
    """Packs the field value into a byte string so it can be sent to the
    server.

    :param structure: The message structure class object
    :return: A byte string of the packed field's value
    """
    calculated = self._get_calculated_value(self.value)
    packed = self._pack_value(calculated)
    expected_size = self._get_calculated_size(self.size, packed)
    actual_size = len(packed)
    # The packed bytes must exactly fill the declared field size.
    if actual_size != expected_size:
        raise ValueError(
            "Invalid packed data length for field %s of %d "
            "does not fit field size of %d"
            % (self.name, actual_size, expected_size)
        )
    return packed
def encode(*args, **kwargs):
    """A helper function to encode an element.

    @param args: The python data to be encoded.
    @kwarg encoding: AMF encoding type. One of L{ENCODING_TYPES}.
    @return: A L{util.BufferedByteStream} object that contains the data.
    """
    encoding = kwargs.pop('encoding', DEFAULT_ENCODING)
    encoder = get_encoder(encoding, **kwargs)
    # Write each element with a plain loop; the original built and discarded
    # a list comprehension purely for its side effects.
    for element in args:
        encoder.writeElement(element)
    stream = encoder.stream
    # Rewind so callers can read the encoded bytes from the start.
    stream.seek(0)
    return stream
def delete(self, queue, virtual_host='/'):
    """Delete a Queue.

    :param str queue: Queue name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.
    :rtype: dict
    """
    # The virtual host must be fully URL-quoted (e.g. '/' becomes '%2F').
    vhost = quote(virtual_host, '')
    return self.http_client.delete(API_QUEUE % (vhost, queue))
def _send(self, method, **parameters):
    """Sends a message to the front-end and returns a promise."""
    # Tag the message with a monotonically increasing call index so the
    # front-end's reply can be routed back to the right promise.
    message = {'index': self._calls, 'method': method}
    message.update(parameters)
    promise = SimplePromise()
    self._callbacks[self._calls] = promise
    self._calls += 1
    self._comm.send(message)
    return promise
def _t_update_b(self):
    r"""A method to update 'b' array at each time step according to
    't_scheme' and the source term value"""
    network = self.project.network
    phase = self.project.phases()[self.settings['phase']]
    # Pore volumes and time-step size for the accumulation (V/dt) term.
    Vi = network['pore.volume']
    dt = self.settings['t_step']
    s = self.settings['t_scheme']
    # (f1, f2, f3) are the weighting factors of the time-integration scheme.
    # NOTE(review): an unrecognised 't_scheme' leaves f1/f2/f3 unbound and
    # would raise NameError below -- presumably validated upstream; confirm.
    if (s == 'implicit'):
        f1, f2, f3 = 1, 1, 0
    elif (s == 'cranknicolson'):
        f1, f2, f3 = 0.5, 1, 0
    elif (s == 'steady'):
        f1, f2, f3 = 1, 0, 1
    # Previous-step solution of the transported quantity.
    x_old = self[self.settings['quantity']]
    # RHS: explicit part of the steady operator applied to x_old, plus the
    # accumulation term; the f3 term is a zero-vector placeholder that only
    # distinguishes the 'steady' scheme symbolically.
    b = (f2 * (1 - f1) * (-self._A_steady) * x_old + f2 * (Vi / dt) * x_old + f3 * np.zeros(shape=(self.Np,), dtype=float))
    self._update_physics()
    for item in self.settings['sources']:
        Ps = self.pores(item)
        # Update b: subtract the explicit portion of each source-term rate.
        b[Ps] = b[Ps] - f2 * (1 - f1) * (phase[item + '.' + 'rate'][Ps])
    self._b = b
    return b
def gap_proportion(sequences, gap_chars='-'):
    """Generates a list with the proportion of gaps by index in a set of
    sequences.

    :param sequences: iterable of aligned sequence records; each must have
        a length equal to the alignment length and expose residues via
        its ``.seq`` attribute.
    :param gap_chars: characters counted as gaps (default ``'-'``).
    :returns: list of per-column gap proportions; empty list for no input
        (the original raised NameError on an empty iterable).
    :raises ValueError: if any sequence length differs from the first.
    """
    aln_len = None
    gaps = []
    sequence_count = 0
    for sequence in sequences:
        if aln_len is None:
            # First sequence fixes the alignment length and column tallies.
            aln_len = len(sequence)
            gaps = [0] * aln_len
        elif len(sequence) != aln_len:
            raise ValueError(
                ("Unexpected sequence length {0}. Is this "
                 "an alignment?").format(len(sequence)))
        # Tally gap characters per alignment column.
        for j, char in enumerate(sequence.seq):
            if char in gap_chars:
                gaps[j] += 1
        sequence_count += 1
    if not sequence_count:
        # Fix: the original referenced the loop index after the loop, which
        # raised NameError for an empty input; return an empty result instead.
        return []
    total = float(sequence_count)
    return [count / total for count in gaps]
def _split_capakey ( self ) :
'''Split a capakey into more readable elements .
Splits a capakey into it ' s grondnummer , bisnummer , exponent and macht .''' | import re
match = re . match ( r"^[0-9]{5}[A-Z]{1}([0-9]{4})\/([0-9]{2})([A-Z\_]{1})([0-9]{3})$" , self . capakey )
if match :
self . grondnummer = match . group ( 1 )
self . bisnummer = match . group ( 2 )
self . exponent = match . group ( 3 )
self . macht = match . group ( 4 )
else :
raise ValueError ( "Invalid Capakey %s can't be parsed" % self . capakey ) |
def _gffastdivmod(dividend, divisor):
    '''Fast polynomial division by using Extended Synthetic Division and optimized for GF(2^p) computations (so it is not generic, must be used with GF2int).
    Transposed from the reedsolomon library: https://github.com/tomerfiliba/reedsolomon
    BEWARE: it works only for monic divisor polynomial! (which is always the case with Reed-Solomon's generator polynomials)'''
    # Copy the dividend: the head of this list will become the quotient and
    # the tail (overwritten in place) the remainder.
    msg_out = list(dividend)
    for i in _range(len(dividend) - (len(divisor) - 1)):
        coef = msg_out[i]  # precaching
        # log(0) is undefined, so skip zero coefficients explicitly (also a
        # useful optimization).
        if coef != 0:
            # In synthetic division the first divisor coefficient is skipped:
            # it would only normalize the dividend coefficient, which is
            # unnecessary because the generator polynomial is always monic.
            for j in _range(1, len(divisor)):
                # In GF(2^p) xor is addition, so this is equivalent to the
                # mathematically conventional msg_out[i + j] += -divisor[j] * coef.
                # (Checking divisor[j] != 0 is skipped on purpose: all generator
                # coefficients are non-zero in Reed-Solomon encoding.)
                msg_out[i + j] ^= divisor[j] * coef
    # msg_out now holds quotient + remainder. The remainder necessarily has
    # the same degree as the divisor (length len(divisor) - 1), so split there.
    separator = -(len(divisor) - 1)
    return Polynomial(msg_out[:separator]), Polynomial(msg_out[separator:])
def get_instance(self, collection, obj_id, **kwargs):
    """Get a record from the database with the id field matching `obj_id`."""
    logging.info("Getting single record")
    try:
        oid = ObjectId(obj_id)
    except InvalidId:
        # Malformed ids produce an empty result rather than an exception.
        logging.error("Invalid ObjectId: {}".format(obj_id))
        return {}
    projection = kwargs.get('projection', {})
    if len(projection) < 1:
        # pymongo expects None, not {}, for "return all fields".
        projection = None
    query = kwargs.get('query', {})
    query['_id'] = oid
    logging.debug("Query: {}".format(query))
    logging.debug("Projection: {}".format(projection))
    return self.db[collection].find_one(query, projection)
def message(self, executor_id, slave_id, message):
    """Sends a message from the framework to one of its executors.

    These messages are best effort; do not expect a framework message to be
    retransmitted in any reliable fashion."""
    logging.info(
        'Sends message `{}` to executor `{}` on slave `{}`'.format(
            message, executor_id, slave_id))
    return self.driver.sendFrameworkMessage(
        encode(executor_id), encode(slave_id), message)
def initGrid(self):
    """Initialise the game grid"""
    # Classic Game-of-Life seed patterns used in deterministic test mode.
    blinker = [(4, 4), (4, 5), (4, 6)]
    toad = [(9, 5), (9, 6), (9, 7), (10, 4), (10, 5), (10, 6)]
    glider = [(4, 11), (5, 12), (6, 10), (6, 11), (6, 12)]
    r_pentomino = [(10, 60), (9, 61), (10, 61), (11, 61), (9, 62)]
    self.grid = {}
    if self.test:
        # Deterministic layout: seed the four known patterns.
        for cell in chain(blinker, toad, glider, r_pentomino):
            self.grid[cell] = 1
        return
    # Random seeding: scatter up to `initsize` live cells in the playfield.
    for _ in range(self.initsize):
        row = random.randint(self.y_pad, self.y_grid - 1)
        col = random.randint(self.x_pad, self.x_grid - 1)
        self.grid[(row, col)] = 1
def publish_attrs(self, upcount=1):
    """Magic function which injects all attrs into the caller's namespace.

    NOTE: the values are written into the target frame's *globals*, not its
    locals (function-frame locals cannot be reliably mutated from outside).

    :param upcount: int, how many stack levels we go up
    :return: None
    """
    frame = inspect.currentframe()
    try:
        # Walk `upcount` frames up the stack, stopping early at the top.
        steps = upcount
        while frame.f_back is not None:
            frame = frame.f_back
            steps -= 1
            if steps == 0:
                break
        for key, value in self.__dict__.items():
            frame.f_globals[key] = value
    finally:
        # Fix: break the reference cycle created by holding a frame object,
        # as recommended by the `inspect` module documentation.
        del frame
def swap_coords(filename):
    """Swap lat and lon in filename.

    Reads `filename`, swaps coordinate pairs on lines starting with one of
    the known tags and writes the result to ``<basename>_converted<ext>``.
    """
    # Read from input file; `with` guarantees the handle is closed even on
    # error (the original left files open and used a Python 2 print statement).
    with open(filename, 'r') as fid:
        lines = fid.readlines()
    # Report
    print('There are %i lines in %s' % (len(lines), filename))
    basename, ext = os.path.splitext(filename)
    # Process and write the converted output
    with open(basename + '_converted' + ext, 'w') as fid:
        for line in lines:
            s = line.strip()
            if s.startswith(position_tag):
                # Swap lat and lon pairs in this line and write back
                fid.write(swap_pairs(line))
            elif s.startswith(lc_tag):
                fid.write(swap_pairs(line, starttag=lc_tag))
            elif s.startswith(uc_tag):
                fid.write(swap_pairs(line, starttag=uc_tag))
            else:
                # Store back unchanged
                fid.write(line)
def GetSizeHint(self, context=None, **unused_kwargs):
    """Retrieves a hint about the size.

    Args:
      context (Optional[DataTypeMapContext]): data type map context, used to
          determine the size hint.

    Returns:
      int: hint of the number of bytes needed from the byte stream or None.
    """
    context_state = getattr(context, 'state', {})
    subcontext = context_state.get('context', None)
    if not subcontext:
        # No nested context yet -- build one around the mapped values.
        mapped_values = context_state.get('mapped_values', None)
        subcontext = DataTypeMapContext(
            values={type(mapped_values).__name__: mapped_values})
    size_hint = 0
    # Sum member hints until one member cannot provide a size.
    for data_type_map in self._data_type_maps:
        member_size = data_type_map.GetSizeHint(context=subcontext)
        if member_size is None:
            break
        size_hint += member_size
    return size_hint
def _set_openflow_global(self, v, load=False):
    """Setter method for openflow_global, mapped from YANG variable /openflow_global (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_openflow_global is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_openflow_global() directly."""
    # Auto-generated pyangbind setter: coerce, validate and store the value.
    if hasattr(v, "_utype"):
        # Unwrap a unified-type wrapper back to its native representation.
        v = v._utype(v)
    try:
        # Validate `v` against the YANG container definition and its metadata.
        t = YANGDynClass(v, base=openflow_global.openflow_global, is_container='container', presence=False, yang_name="openflow-global", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'enables openflow and version ', u'cli-drop-node-name': None, u'callpoint': u'OpenflowBasicConfigCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with a structured error describing the expected YANG type.
        raise ValueError({'error-string': """openflow_global must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=openflow_global.openflow_global, is_container='container', presence=False, yang_name="openflow-global", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'enables openflow and version ', u'cli-drop-node-name': None, u'callpoint': u'OpenflowBasicConfigCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='container', is_config=True)""", })
    self.__openflow_global = t
    if hasattr(self, '_set'):
        # Notify the parent tree that this container has been (re)set.
        self._set()
def ARPLimitExceeded_originator_switch_info_switchIpV4Address(self, **kwargs):
    """Auto Generated Code"""
    # Build config/ARPLimitExceeded/originator-switch-info/switchIpV4Address.
    config = ET.Element("config")
    notification = ET.SubElement(
        config, "ARPLimitExceeded",
        xmlns="http://brocade.com/ns/brocade-notification-stream")
    switch_info = ET.SubElement(notification, "originator-switch-info")
    address = ET.SubElement(switch_info, "switchIpV4Address")
    address.text = kwargs.pop('switchIpV4Address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def herokuapp_robots_view(request):
    """Add restrictive robots.txt to prevent sites under *.herokuapp.com from being indexed."""
    host = request.get_host()
    if host and 'herokuapp' in host:
        # Disallow all crawlers on the Heroku staging domain.
        body = "User-agent: *\nDisallow: /"
    else:
        # Elsewhere serve an empty robots.txt (no restrictions).
        body = ""
    return HttpResponse(body, content_type="text/plain")
def country(from_key='name', to_key='iso'):
    """Creates and returns a mapper function to access country data.

    The mapper function that is returned must be called with one argument. In
    the default case you call it with a name and it returns a 3-letter
    ISO_3166-1 code, e.g. called with ``Spain`` it would return ``ESP``.

    :param from_key: (optional) the country attribute you give as input.
        Defaults to ``name``.
    :param to_key: (optional) the country attribute you want as output.
        Defaults to ``iso``.
    :return: mapper
    :rtype: function
    """
    gc = GeonamesCache()
    dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)

    def mapper(input):
        # For country name inputs take the names mapping into account.
        if from_key == 'name':
            input = mappings.country_names.get(input, input)
        record = dataset.get(input)
        # If there is a record return the demanded attribute.
        if record:
            return record[to_key]

    return mapper
def to_bytes(self, inst_data, encoding=None):
    """Transform an object into :class:`bytes`.

    :param object inst_data: object to encode
    :param str encoding: character set used to encode the bytes
        returned from the ``dumps`` function. This defaults to
        :attr:`default_encoding`
    :returns: :class:`tuple` of the selected content
        type and the :class:`bytes` representation of
        `inst_data`
    """
    charset = encoding or self.default_encoding
    content_type = '{0}; charset="{1}"'.format(self.content_type, charset)
    # Normalise to unicode first, then serialize and encode in one pass.
    payload = self._dumps(escape.recursive_unicode(inst_data))
    return content_type, payload.encode(charset)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.