signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def get_direct_command(self, command_url):
    """Issue a raw command via HTTP GET and return the response.

    Records the HTTP status code on ``self.http_code`` and raises
    ``requests.HTTPError`` for non-2xx responses.

    :param command_url: full URL of the raw command to execute
    :return: the ``requests.Response`` object
    """
    self.logger.info("get_direct_command: %s", command_url)
    credentials = requests.auth.HTTPBasicAuth(self.username, self.password)
    response = requests.get(command_url, timeout=self.timeout, auth=credentials)
    self.http_code = response.status_code
    response.raise_for_status()
    return response
|
def openQuickSettingsSettings(self):
    '''Opens the Quick Settings shade and then tries to open Settings from there.

    The Settings entry is located first by content-description (works on
    API >= 20) and, failing that, by visible text. Raises
    ViewNotFoundException if no localized Settings button is found.
    '''
    # Localized labels of the "Settings" button in the Quick Settings shade.
    STATUS_BAR_SETTINGS_SETTINGS_BUTTON = [
        u"Settings", u"Cài đặt", u"Instellingen", u"Կարգավորումներ", u"设置", u"Nastavitve",
        u"සැකසීම්", u"Ayarlar", u"Setelan", u"Настройки", u"تنظیمات", u"Mga Setting",
        u"Тохиргоо", u"Configuració", u"Setări", u"Налады", u"Einstellungen", u"პარამეტრები",
        u"सेटिङहरू", u"Կարգավորումներ", u"Nustatymai", u"Beállítások", u"設定", u"सेटिंग",
        u"Настройки", u"Inställningar", u"設定", u"ການຕັ້ງຄ່າ", u"Configurações", u"Tetapan",
        u"설정", u"ការកំណត់", u"Ajustes", u"הגדרות", u"Ustawienia", u"Nastavení",
        u"Ρυθμίσεις", u"Тохиргоо", u"Ayarlar", u"Indstillinger", u"Налаштування",
        u"Mipangilio", u"Izilungiselelo", u"設定", u"Nastavenia", u"Paramètres", u"ቅንብሮች",
        u"การตั้งค่า", u"Seaded", u"Iestatījumi", u"Innstillinger", u"Подешавања",
        u"الإعدادات", u"සැකසීම්", u"Definições", u"Configuración", u"პარამეტრები",
        u"Postavke", u"Ayarlar", u"Impostazioni", u"Asetukset", u"Instellings", u"Seaded",
        u"ការកំណត់", u"सेटिङहरू", u"Tetapan"]
    self.openQuickSettings()
    # this works on API >= 20: Quick Settings buttons expose a content-description
    found = False
    for s in STATUS_BAR_SETTINGS_SETTINGS_BUTTON:
        if DEBUG:
            print >> sys.stderr, u"finding view with cd=", type(s)
        view = self.vc.findViewWithContentDescription(u'''{0}'''.format(s))
        if view:
            found = True
            view.touch()
            break
    if not found:  # for previous APIs, let's find the text
        for s in STATUS_BAR_SETTINGS_SETTINGS_BUTTON:
            if DEBUG:
                print >> sys.stderr, "s=", type(s)
            try:
                print >> sys.stderr, "finding view with text=", u'''{0}'''.format(s)
            except:
                # Debug print may fail on terminals that cannot encode the label.
                pass
            view = self.vc.findViewWithText(s)
            if view:
                found = True
                view.touch()
                break
    if not found:
        raise ViewNotFoundException("content-description", "'Settings' or text 'Settings'", "ROOT")
    self.vc.sleep(1)
    # Re-dump the view hierarchy of the top window now that Settings is open.
    self.vc.dump(window=-1)
|
def create(self, domain_name, partner_id=None):
    """Register a domain you control with netki as a Gem-managed domain.

    Note: after registering a domain, unless you have already set up its
    DNSSEC / DS records, you'll need to do so: http://docs.netki.apiary.io
    The information required will be an attribute of the returned
    NetkiDomain object.

    Args:
        domain_name (str): domain to add (e.g. 'gem.co')
        partner_id (str, optional): your netki partner_id (if you have one)

    Returns:
        The new round.NetkiDomain
    """
    payload = {'domain_name': domain_name}
    if partner_id:
        payload['partner_id'] = partner_id
    new_domain = self.wrap(self.resource.create(payload))
    self.add(new_domain)
    return new_domain
|
def future_raise(self, tp, value=None, tb=None):
    """raise_ implementation from future.utils.

    Raises ``tp`` (an exception class or instance). When ``value`` is given,
    ``tp`` must be a class and is instantiated with ``value``; passing both an
    exception instance and a value is a TypeError. ``tb`` is attached as the
    traceback when it differs from the exception's current one.
    """
    if value is not None and isinstance(tp, Exception):
        raise TypeError("instance exception may not have a separate value")
    exc = tp(value) if value is not None else tp
    if exc.__traceback__ is not tb:
        raise exc.with_traceback(tb)
    raise exc
|
def parse(yaml, validate=True):
    """Parse the given YAML data into a `Config` object, optionally validating it first.

    :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
    :type yaml: list|dict|str|file
    :param validate: Whether to validate the data before attempting to parse it.
    :type validate: bool
    :return: Config object
    :rtype: valohai_yaml.objs.Config
    """
    parsed = read_yaml(yaml)
    if validate:  # pragma: no branch
        # Imported lazily to avoid a hard dependency when validation is off.
        from .validation import validate
        validate(parsed, raise_exc=True)
    return Config.parse(parsed)
|
def get_dictionary(self):
    """Return the list of parameter names and a dictionary of values
    (good for writing to a databox header!).

    Return format is sorted_keys, dictionary.
    """
    names = []
    values = {}
    # Walk every top-level tree item and recurse into its parameter.
    for index in range(self._widget.topLevelItemCount()):
        parameter = self._widget.topLevelItem(index).param
        self._get_parameter_dictionary('', values, names, parameter)
    return names, values
|
def freeze_variables(stop_gradient=True, skip_collection=False):
    """Return a context to freeze variables, by wrapping ``tf.get_variable``
    with a custom getter. It works by either applying ``tf.stop_gradient`` on
    the variables, or by keeping them out of the ``TRAINABLE_VARIABLES``
    collection, or both.

    Example:

    .. code-block:: python

        with varreplace.freeze_variables(stop_gradient=False, skip_collection=True):
            x = FullyConnected('fc', x, 1000)   # fc/* will not be trained

    Args:
        stop_gradient (bool): if True, variables returned from `get_variable`
            will be wrapped with `tf.stop_gradient` and therefore has no
            gradient when used later. Note that the created variables may
            still have gradient when accessed by other approaches (e.g. by
            name, or by collection). Also note that this makes
            `tf.get_variable` return a Tensor instead of a Variable, which may
            break existing code. Therefore, it's recommended to use the
            `skip_collection` option instead.
        skip_collection (bool): if True, do not add the variable to
            ``TRAINABLE_VARIABLES`` collection, but to ``MODEL_VARIABLES``
            collection. As a result they will not be trained by default.
    """
    def custom_getter(getter, *args, **kwargs):
        # Default True mirrors tf.get_variable's own default for `trainable`.
        trainable = kwargs.get('trainable', True)
        # The variable name may arrive positionally or as a keyword.
        name = args[0] if len(args) else kwargs.get('name')
        if skip_collection:
            # Keep the variable out of TRAINABLE_VARIABLES entirely.
            kwargs['trainable'] = False
        v = getter(*args, **kwargs)
        if skip_collection:
            tf.add_to_collection(tf.GraphKeys.MODEL_VARIABLES, v)
        if trainable and stop_gradient:
            # Wrap so the returned tensor carries no gradient when used later.
            v = tf.stop_gradient(v, name='freezed_' + name)
        return v
    return custom_getter_scope(custom_getter)
|
def add_indicators(self, indicators=None, private=False, tags=None):
    """Add indicators to the remote instance.

    :param indicators: iterable of indicator values to submit
    :param private: if True, submit the hashed indicator values instead of
        the cleaned raw values
    :param tags: list of tags, or a comma-separated string of tags, to
        attach to the submitted indicators
    :return: dict of processing statistics (or a message dict when nothing
        is left to process after cleaning/whitelisting/cache checks)
    :raises Exception: if no indicators were supplied
    """
    # Avoid mutable default arguments; treat "nothing supplied" as empty.
    indicators = list(indicators) if indicators else []
    tags = tags if tags is not None else []
    if len(indicators) == 0:
        raise Exception("No indicators were identified.")
    self.logger.debug("Checking {} indicators".format(len(indicators)))
    cleaned = clean_indicators(indicators)
    self.logger.debug("Cleaned {} indicators".format(len(cleaned)))
    whitelisted = check_whitelist(cleaned)
    self.logger.debug("Non-whitelisted {} indicators".format(len(whitelisted)))
    indicators = prune_cached(whitelisted)
    # Hashes are computed up front so they can be substituted when `private`.
    hashed = hash_values(indicators)
    self.logger.debug("Non-cached {} indicators".format(len(indicators)))
    self.logger.debug("Processing {} indicators".format(len(indicators)))
    # Indicators are submitted in batches of 100.
    request_count = int(math.ceil(len(indicators) / 100.0))
    if request_count == 0:
        mesg = "[!] No indicators were left to process after "
        mesg += "cleaning, whitelisting and checking the cache."
        return {'message': mesg}
    stats = {'success': 0, 'failure': 0, 'requests': request_count, 'written': 0}
    mesg = "{} indicators found, making {} requests"
    self.logger.debug(mesg.format(len(indicators), request_count))
    if private:
        indicators = hashed
    if type(tags) == str:
        tags = [t.strip().lower() for t in tags.split(',')]
    start, end = (0, 100)
    for idx in range(request_count):
        if idx > 0:
            # Throttle between batches so we never trip the rate limit.
            self.logger.debug("Waiting 3 seconds before next request.")
            time.sleep(3)
        to_send = {'indicators': indicators[start:end], 'tags': tags}
        r = self._send_data('POST', 'admin', 'add-indicators', to_send)
        start, end = (end, end + 100)
        if not r['success']:
            stats['failure'] += 1
            continue
        stats['success'] += 1
        stats['written'] += r['writeCount']
        # Only successfully written batches are cached locally.
        cache_items(to_send['indicators'])
    msg = "{written} indicators written using {requests} requests: "
    msg += "{success} success, {failure} failure"
    stats['message'] = msg.format(**stats)
    return stats
|
def cache_last_modified(request, *argz, **kwz):
    '''Last modification date for a cached page.

    Intended for usage in conditional views (@condition decorator).
    '''
    # Reuse pre-resolved view data when the caller supplies it.
    view_data = kwz.get('_view_data') or initview(request)
    response, site, cachekey = view_data
    if not response:
        return None
    # The response tuple carries the modification timestamp at index 1.
    return response[1]
|
def iterGet(self, objectType, *args, **coolArgs):
    """Same as get, but returns the elements one by one; much more
    efficient for large outputs."""
    # The wrapping decision depends only on objectType, so hoist it out of the loop.
    needs_wrapping = issubclass(objectType, pyGenoRabaObjectWrapper)
    for element in self._makeLoadQuery(objectType, *args, **coolArgs).iterRun():
        if needs_wrapping:
            yield objectType(wrapped_object_and_bag=(element, self.bagKey))
        else:
            yield element
|
def SetStorageProfiler(self, storage_profiler):
    """Sets the storage profiler.

    Args:
        storage_profiler (StorageProfiler): storage profiler.
    """
    self._storage_profiler = storage_profiler
    # Propagate to the underlying storage file when one is open.
    storage_file = self._storage_file
    if storage_file:
        storage_file.SetStorageProfiler(storage_profiler)
|
def set_contents_from_filename(self, filename, headers=None, replace=True, cb=None,
                               num_cb=10, policy=None, md5=None,
                               reduced_redundancy=False, encrypt_key=False):
    """Store an object in S3 using the name of the Key object as the key in
    S3 and the contents of the file named by 'filename'.

    See the set_contents_from_file method for details about the parameters.

    :type filename: string
    :param filename: The name of the file that you want to put onto S3.

    :type headers: dict
    :param headers: Additional headers to pass along with the request to AWS.

    :type replace: bool
    :param replace: If True, replaces the contents of the file if it
        already exists.

    :type cb: function
    :param cb: a callback function that will be called to report progress on
        the upload. The callback should accept two integer parameters: the
        number of bytes successfully transmitted to S3 and the total size of
        the object being transmitted.

    :type num_cb: int
    :param num_cb: (optional) If a callback is specified with the cb
        parameter, this parameter determines the granularity of the callback
        by defining the maximum number of times the callback will be called
        during the file transfer.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the new key
        in S3.

    :type md5: tuple
    :param md5: A tuple containing the hexdigest version of the MD5 checksum
        of the file as the first element and the Base64-encoded version of
        the plain checksum as the second element (the format returned by the
        compute_md5 method). If present, it is used as the MD5 of the file;
        otherwise the checksum will be computed.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, set the storage class of the new Key
        to REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature
        of S3 provides lower redundancy at lower storage cost.

    :type encrypt_key: bool
    :param encrypt_key: If True, the new copy of the object will be encrypted
        on the server-side by S3 and will be stored in an encrypted form
        while at rest in S3.
    """
    # Use a context manager so the file handle is closed even if the upload
    # raises (the previous implementation leaked the handle on error).
    with open(filename, 'rb') as fp:
        self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy,
                                    md5, reduced_redundancy,
                                    encrypt_key=encrypt_key)
|
def _save_file ( self , data , path ) :
"""Save an file to the specified path .
: param data : binary data of the file
: param path : path to save the file to"""
|
with open ( path , 'wb' ) as tfile :
for chunk in data :
tfile . write ( chunk )
|
def ShlexSplit(string):
    """A wrapper for `shlex.split` that works with unicode objects.

    Args:
        string: A unicode string to split.

    Returns:
        A list of unicode strings representing parts of the input string.
    """
    precondition.AssertType(string, Text)
    # Python 2's shlex operates on bytes, so round-trip through UTF-8 there.
    encoded = string.encode("utf-8") if PY2 else string
    parts = shlex.split(encoded)
    if PY2:
        # TODO(hanuszczak): https://github.com/google/pytype/issues/127
        parts = [part.decode("utf-8") for part in parts]
    return parts
|
def EMAIL_REQUIRED(self):
    """The user is required to hand over an e-mail address when signing up."""
    # Fall back to django-allauth's account-level EMAIL_REQUIRED setting
    # when no provider-specific override has been configured.
    from allauth.account import app_settings as account_settings
    return self._setting("EMAIL_REQUIRED", account_settings.EMAIL_REQUIRED)
|
def query_by_slug(slug):
    '''Query all posts (chapters) of the category identified by ``slug``,
    newest first, or return None when the slug matches no category.'''
    category = MCategory.get_by_slug(slug)
    if not category:
        return None
    cat_id = category.uid
    # Category ids ending in '00' are matched via par_id, others via tag_id.
    if cat_id.endswith('00'):
        condition = TabPost2Tag.par_id == cat_id
    else:
        condition = TabPost2Tag.tag_id == cat_id
    return (TabPost.select()
            .join(TabPost2Tag, on=(TabPost.uid == TabPost2Tag.post_id))
            .where(condition)
            .order_by(TabPost.time_update.desc()))
|
def emoticons_filter(content, exclude='', autoescape=None):
    """Filter for rendering emoticons."""
    # Escape the input first when autoescaping is active, otherwise pass through.
    if autoescape:
        escape = conditional_escape
    else:
        escape = lambda value: value
    return mark_safe(replace_emoticons(escape(content), exclude))
|
def setSpeedFactor(self, typeID, factor):
    """setSpeedFactor(string, double) -> None

    Sends a TraCI "set vehicle-type variable" command that stores ``factor``
    as the VAR_SPEED_FACTOR of vehicle type ``typeID``.
    """
    self._connection._sendDoubleCmd(tc.CMD_SET_VEHICLETYPE_VARIABLE, tc.VAR_SPEED_FACTOR, typeID, factor)
|
def do_create(self, line):
    "create {tablename} [-c rc,wc] {hkey}[:{type}] [{rkey}:{type}]"
    args = self.getargs(line)
    # Default read/write throughput capacity.
    read_cap = write_cap = 5
    name = args.pop(0)  # tablename
    # Optional capacity flag: -c rc[,wc]. Guarding on `args` avoids an
    # IndexError when only a table name was supplied.
    if args and args[0] == "-c":
        args.pop(0)  # skip -c
        capacity = args.pop(0).strip()
        rc, _, wc = capacity.partition(",")
        read_cap = int(rc)
        # Write capacity defaults to the read capacity when omitted.
        write_cap = int(wc) if wc != "" else read_cap
    schema = []
    # Hash key; type defaults to 'S' (string).
    hkey, _, hkey_type = args.pop(0).partition(':')
    hkey_type = self.get_type(hkey_type or 'S')
    schema.append(boto.dynamodb2.fields.HashKey(hkey, hkey_type))
    # Optional range key, same "name:type" syntax.
    if args:
        rkey, _, rkey_type = args.pop(0).partition(':')
        rkey_type = self.get_type(rkey_type or 'S')
        schema.append(boto.dynamodb2.fields.RangeKey(rkey, rkey_type))
    table = boto.dynamodb2.table.Table.create(
        name, schema=schema, throughput={'read': read_cap, 'write': write_cap})
    self.pprint(table.describe())
|
def new_from_list(cls, content, fill_title=True, **kwargs):
    """Populates the Table with a list of tuples of strings.

    Args:
        content (list): list of tuples of strings; each tuple is a row.
        fill_title (bool): if True, the first tuple in the list will be
            set as the title.
    """
    instance = cls(**kwargs)
    instance.append_from_list(content, fill_title)
    return instance
|
def dynamic_content_item_variant_delete(self, item_id, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/dynamic_content#delete-variant"
    # DELETE /api/v2/dynamic_content/items/{item_id}/variants/{id}.json
    path = "/api/v2/dynamic_content/items/{item_id}/variants/{id}.json".format(
        item_id=item_id, id=id)
    return self.call(path, method="DELETE", **kwargs)
|
def deserialize_footer(stream, verifier=None):
    """Deserializes a footer.

    :param stream: Source data stream
    :type stream: io.BytesIO
    :param verifier: Signature verifier object (optional)
    :type verifier: aws_encryption_sdk.internal.crypto.Verifier
    :returns: Deserialized footer
    :rtype: aws_encryption_sdk.internal.structures.MessageFooter
    :raises SerializationError: if verifier supplied and no footer found
    """
    _LOGGER.debug("Starting footer deserialization")
    # Without a verifier there is nothing to read: return an empty signature.
    if verifier is None:
        return MessageFooter(signature=b"")
    try:
        (sig_len,) = unpack_values(">H", stream)
        (signature,) = unpack_values(">{sig_len}s".format(sig_len=sig_len), stream)
    except SerializationError:
        raise SerializationError("No signature found in message")
    verifier.verify(signature)
    return MessageFooter(signature=signature)
|
def docCopyNodeList(self, doc):
    """Do a recursive copy of the node list."""
    # Unwrap the underlying libxml2 document pointer, if any.
    target = None if doc is None else doc._o
    result = libxml2mod.xmlDocCopyNodeList(target, self._o)
    if result is None:
        raise treeError('xmlDocCopyNodeList() failed')
    return xmlNode(_obj=result)
|
def topics_in(self, d, topn=5):
    """List the top ``topn`` topics in document ``d``."""
    # Delegates to the per-document feature set's ``top`` helper.
    return self.theta.features[d].top(topn)
|
def build_request(headers: Headers) -> str:
    """Build a handshake request to send to the server.

    Return the ``key`` which must be passed to :func:`check_response`.
    """
    # 16 random bytes, base64-encoded, per the WebSocket opening handshake.
    nonce = bytes(random.getrandbits(8) for _ in range(16))
    key = base64.b64encode(nonce).decode()
    headers["Upgrade"] = "websocket"
    headers["Connection"] = "Upgrade"
    headers["Sec-WebSocket-Key"] = key
    headers["Sec-WebSocket-Version"] = "13"
    return key
|
def _callbackPlaceFillOrders(self, d):
    """This method distinguishes notifications caused by matched orders
    from those caused by placed orders and dispatches accordingly.
    Unrecognized notification types are ignored.
    """
    # Order of checks matters: FilledOrder must be tested before Order.
    if isinstance(d, FilledOrder):
        self.onOrderMatched(d)
    elif isinstance(d, Order):
        self.onOrderPlaced(d)
    elif isinstance(d, UpdateCallOrder):
        self.onUpdateCallOrder(d)
|
def _open_all_rings(self):
    """Having already generated all unique fragments that did not require ring
    opening, now we want to also obtain fragments that do require opening. We
    achieve this by looping through all unique fragments and opening each bond
    present in any ring we find. We also temporarily add the principle
    molecule graph to self.unique_fragments so that its rings are opened as
    well.
    """
    # Temporarily prepend the principle molecule graph so it is processed too.
    self.unique_fragments.insert(0, self.mol_graph)
    # NOTE: the list grows while being iterated — fragments appended below are
    # themselves visited later, so ring opening is applied transitively.
    for fragment in self.unique_fragments:
        ring_edges = fragment.find_rings()
        if ring_edges != []:
            # Open every bond of the first ring found in this fragment.
            for bond in ring_edges[0]:
                new_fragment = open_ring(fragment, [bond], self.opt_steps)
                # Only keep fragments not isomorphic to an existing one.
                found = False
                for unique_fragment in self.unique_fragments:
                    if unique_fragment.isomorphic_to(new_fragment):
                        found = True
                        break
                if not found:
                    self.unique_fragments_from_ring_openings.append(new_fragment)
                    self.unique_fragments.append(new_fragment)
    # Finally, remove the principle molecule graph:
    self.unique_fragments.pop(0)
|
def format_name(self, name, indent_size=4):
    """Format the name of this verifier.

    The name will be formatted as::

        <name>: <short description>
        long description if one is given followed by a newline
        otherwise no long description

    Args:
        name (string): A name for this validator
        indent_size (int): The number of spaces to indent the description

    Returns:
        string: The formatted name block with a short and/or long
        description appended.
    """
    if self.short_desc is None:
        block = name + '\n'
    else:
        block = name + ': ' + self.short_desc + '\n'
    # Append the wrapped long description, when present.
    if self.long_desc is not None:
        block += self.wrap_lines(self.long_desc, 1, indent_size=indent_size)
        block += '\n'
    return block
|
def restore(file_name, jail=None, chroot=None, root=None):
    '''Reads an archive created by ``pkg backup -d`` and recreates the
    database.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.restore /tmp/pkg

    jail
        Restore database to the specified jail. Note that this will run the
        command within the jail, and so the path to the file from which the
        pkg database will be restored is relative to the root of the jail.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.restore /tmp/pkg jail=<jail name or id>

    chroot
        Restore database to the specified chroot (ignored if ``jail`` is
        specified). Note that this will run the command within the chroot,
        and so the path to the file from which the pkg database will be
        restored is relative to the root of the chroot.

    root
        Restore database to the specified root (ignored if ``jail`` is
        specified). Note that this will run the command within the root, and
        so the path to the file from which the pkg database will be restored
        is relative to the root of the root.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.restore /tmp/pkg chroot=/path/to/chroot
    '''
    # `pkg backup -r <file>` restores the database; _pkg() builds the
    # jail/chroot/root-aware base command.
    return __salt__['cmd.run'](
        _pkg(jail, chroot, root) + ['backup', '-r', file_name],
        output_loglevel='trace',
        python_shell=False)
|
def delete(self):
    """Delete this cluster.

    For example:

    .. literalinclude:: snippets.py
        :start-after: [START bigtable_delete_cluster]
        :end-before: [END bigtable_delete_cluster]

    Marks a cluster and all of its tables for permanent deletion in 7 days.

    Immediately upon completion of the request:

    * Billing will cease for all of the cluster's reserved resources.
    * The cluster's ``delete_time`` field will be set 7 days in the future.

    Soon afterward:

    * All tables within the cluster will become unavailable.

    At the cluster's ``delete_time``:

    * The cluster and **all of its tables** will immediately and irrevocably
      disappear from the API, and their data will be permanently deleted.
    """
    # Delegate to the instance-admin API client; self.name is the fully
    # qualified cluster name.
    client = self._instance._client
    client.instance_admin_client.delete_cluster(self.name)
|
def get_subject(self, lang=None):
    """Get the subject of the object.

    :param lang: Lang to retrieve
    :return: Subject string representation
    :rtype: Literal
    """
    # Single-valued Dublin Core "subject" lookup on the metadata store.
    return self.metadata.get_single(key=DC.subject, lang=lang)
|
def plot(self, minx=-1.5, maxx=1.2, miny=-0.2, maxy=2, **kwargs):
    """Helper function to plot the Muller potential.

    Draws a filled contour plot of ``self.potential`` over the rectangle
    [minx, maxx] x [miny, maxy]. Pass ``ax=...`` to draw on a specific axes;
    otherwise the pyplot module is used directly.
    """
    import matplotlib.pyplot as pp
    # ~200 grid cells along the larger of the two extents.
    grid_width = max(maxx - minx, maxy - miny) / 200.0
    ax = kwargs.pop('ax', None)
    xx, yy = np.mgrid[minx:maxx:grid_width, miny:maxy:grid_width]
    V = self.potential(xx, yy)
    # clip off any values greater than 200, since they mess up
    # the color scheme
    if ax is None:
        ax = pp
    ax.contourf(xx, yy, V.clip(max=200), 40, **kwargs)
|
def upload_segmentation_image(self, mapobject_type_name, plate_name, well_name,
                              well_pos_y, well_pos_x, tpoint=0, zplane=0,
                              image=None):
    '''Uploads a segmentation image.

    Parameters
    ----------
    mapobject_type_name: str
        name of the segmented objects
    plate_name: str
        name of the plate
    well_name: str
        name of the well in which the image is located
    well_pos_y: int
        y-position of the site relative to the well grid
    well_pos_x: int
        x-position of the site relative to the well grid
    tpoint: int, optional
        zero-based time point index (default: ``0``)
    zplane: int, optional
        zero-based z-plane index (default: ``0``)
    image: numpy.ndarray[numpy.int32]
        labeled array

    Raises
    ------
    TypeError
        when `image` is not provided in form of a `numpy` array
    ValueError
        when `image` doesn't have 32-bit integer data type

    See also
    --------
    :func:`tmserver.api.mapobject.add_segmentations`
    :class:`tmlib.models.mapobject.MapobjectSegmentation`
    '''
    # NOTE: tpoint/zplane now default to 0, matching the documented contract;
    # positional callers are unaffected. image=None fails the ndarray check
    # below, preserving the original "image is required" behavior.
    if not isinstance(image, np.ndarray):
        raise TypeError('Image must be provided in form of a numpy array.')
    if image.dtype != np.int32:
        raise ValueError('Image must have 32-bit integer data type.')
    self._upload_segmentation_image(mapobject_type_name, plate_name, well_name,
                                    well_pos_y, well_pos_x, tpoint, zplane,
                                    image)
|
def frombase(path1, path2):
    # type: (Text, Text) -> Text
    """Get the final path of ``path2`` that isn't in ``path1``.

    Arguments:
        path1 (str): A PyFilesystem path.
        path2 (str): A PyFilesystem path.

    Returns:
        str: the final part of ``path2``.

    Example:
        >>> frombase('foo/bar/', 'foo/bar/baz/egg')
        'baz/egg'
    """
    if isparent(path1, path2):
        # Strip the common prefix.
        return path2[len(path1):]
    raise ValueError("path1 must be a prefix of path2")
|
def flush(self, timeout=None):
    """Invoking this method makes all buffered records immediately available
    to send (even if linger_ms is greater than 0) and blocks on the
    completion of the requests associated with these records. The
    post-condition of :meth:`~kafka.KafkaProducer.flush` is that any
    previously sent record will have completed
    (e.g. Future.is_done() == True). A request is considered completed when
    either it is successfully acknowledged according to the 'acks'
    configuration for the producer, or it results in an error.

    Other threads can continue sending messages while one thread is blocked
    waiting for a flush call to complete; however, no guarantee is made
    about the completion of messages sent after the flush call begins.

    Arguments:
        timeout (float, optional): timeout in seconds to wait for completion.

    Raises:
        KafkaTimeoutError: failure to flush buffered records within the
            provided timeout
    """
    log.debug("Flushing accumulated records in producer.")  # trace
    # Mark all buffered batches as sendable, poke the sender thread, then
    # block until every in-flight batch resolves (or the timeout expires).
    self._accumulator.begin_flush()
    self._sender.wakeup()
    self._accumulator.await_flush_completion(timeout=timeout)
|
def get_cidr_name(cidr, ip_ranges_files, ip_ranges_name_key):
    """Read display name for CIDRs from ip-ranges files.

    :param cidr: CIDR to look up (string or ``netaddr.IPNetwork``)
    :param ip_ranges_files: list of ip-ranges file names to search first
    :param ip_ranges_name_key: key under which each range's display name is stored
    :return: the display name, or an "Unknown CIDR" fallback (annotated with
        the AWS service/region when the CIDR falls in a known AWS range)
    """
    # Parse the CIDR once; the original re-parsed it on every loop iteration.
    cidr = netaddr.IPNetwork(cidr)
    for filename in ip_ranges_files:
        for ip_range in read_ip_ranges(filename, local_file=True):
            if cidr in netaddr.IPNetwork(ip_range['ip_prefix']):
                return ip_range[ip_ranges_name_key].strip()
    # Fall back to the global AWS ip-ranges data.
    for ip_range in aws_ip_ranges:
        if cidr in netaddr.IPNetwork(ip_range['ip_prefix']):
            return 'Unknown CIDR in %s %s' % (ip_range['service'], ip_range['region'])
    return 'Unknown CIDR'
|
def _add_model(self, model_list_or_dict, core_element, model_class, model_key=None, load_meta_data=True):
    """Adds one model for a given core element.

    The method will add a model for a given core object and checks if there
    is a corresponding model object in the future expected model list. The
    method does not check if an object with corresponding model has already
    been inserted.

    :param model_list_or_dict: could be a list or dictionary of one model type
    :param core_element: the core element to add a model for; can be a state
        or a state element
    :param model_class: model class of the elements that should be inserted
    :param model_key: if model_list_or_dict is a dictionary, the key is the id
        of the respective element (e.g. 'state_id')
    :param load_meta_data: specific argument for loading meta data
    :return:
    """
    # Reuse a pre-existing "future expected" model when available.
    found_model = self._get_future_expected_model(core_element)
    if found_model:
        found_model.parent = self
    # IncomeModel is stored as a dedicated attribute, not in the container.
    if model_class is IncomeModel:
        self.income = found_model if found_model else IncomeModel(core_element, self)
        return
    if model_key is None:
        # List container: append (load_meta_data not forwarded here).
        model_list_or_dict.append(found_model if found_model else model_class(core_element, self))
    else:
        # Dict container: insert under the element's id.
        model_list_or_dict[model_key] = found_model if found_model else model_class(core_element, self, load_meta_data=load_meta_data)
|
def update_args(self, override_args):
    """Update the arguments used to invoke the application.

    Note that this will also update the dictionary of input and output files.

    Parameters
    ----------
    override_args : dict
        dictionary of arguments to override the current values
    """
    self.args = extract_arguments(override_args, self.args)
    self._job_configs = self.build_job_configs(self.args)
    # Rebuild the job dictionary only when the scatter link has no jobs yet.
    if not self._scatter_link.jobs:
        self._build_job_dict()
    self._latch_file_info()
|
def fnd_unq_rws(A, return_index=False, return_inverse=False):
    """Find unique rows in a 2D array.

    Parameters
    ----------
    A : 2d numpy array
        Array for which unique rows should be identified.
    return_index : bool
        Bool to decide whether I is returned.
    return_inverse : bool
        Bool to decide whether J is returned.

    Returns
    -------
    B : 1d numpy array
        Unique rows.
    I : 1d numpy array, only returned if return_index is True
        B = A[I, :]
    J : 2d numpy array, only returned if return_inverse is True
        A = B[J, :]
    """
    A = np.require(A, requirements='C')
    assert A.ndim == 2, "array must be 2-dim'l"
    # View each row as one structured element so np.unique compares whole rows.
    row_view = A.view([('', A.dtype)] * A.shape[1])
    uniq = np.unique(row_view, return_index=return_index, return_inverse=return_inverse)
    if not (return_index or return_inverse):
        return uniq.view(A.dtype).reshape((-1, A.shape[1]), order='C')
    # np.unique returned a tuple: convert the rows back, pass indices through.
    rows = uniq[0].view(A.dtype).reshape((-1, A.shape[1]), order='C')
    return (rows,) + uniq[1:]
|
def create_basic_op_node(op_name, node, kwargs):
    """Helper function to create a basic operator node that doesn't
    contain op-specific attrs."""
    name, input_nodes, _ = get_inputs(node, kwargs)
    # The output tensor is named after the node itself.
    basic_node = onnx.helper.make_node(op_name, input_nodes, [name], name=name)
    return [basic_node]
|
def p_node_expression(self, t):
    '''node _ expression : IDENT'''
    # NOTE: the docstring above is a PLY grammar rule and must not be edited —
    # PLY parses it to build the parser tables.
    if len(t) < 3:
        # Single IDENT production: the node expression is the identifier itself.
        t[0] = t[1]
        # Record the identifier as a 'vertex' term in the accumulator.
        self.accu.add(Term('vertex', ["gen(\"" + t[1] + "\")"]))
    else:
        # Unreachable for this grammar rule (IDENT yields len(t) == 2).
        t[0] = "unknown"
|
def _add_default_options(self) -> None:
    """Add default command line options to the parser.

    Registers four option groups: trust-store updating, client certificates,
    input/output, and connectivity.
    """
    # Updating the trust stores
    update_stores_group = OptionGroup(self._parser, 'Trust stores options', '')
    update_stores_group.add_option(
        '--update_trust_stores',
        help='Update the default trust stores used by SSLyze. The latest stores will be downloaded from '
             'https://github.com/nabla-c0d3/trust_stores_observatory. This option is meant to be used separately, '
             'and will silence any other command line option supplied to SSLyze.',
        dest='update_trust_stores',
        action='store_true',
    )
    self._parser.add_option_group(update_stores_group)
    # Client certificate options
    clientcert_group = OptionGroup(self._parser, 'Client certificate options', '')
    clientcert_group.add_option(
        '--cert',
        help='Client certificate chain filename. The certificates must be in PEM format and must be sorted '
             'starting with the subject\'s client certificate, followed by intermediate CA certificates if '
             'applicable.',
        dest='cert')
    clientcert_group.add_option('--key', help='Client private key filename.', dest='key')
    clientcert_group.add_option('--keyform', help='Client private key format. DER or PEM (default).', dest='keyform', default='PEM')
    clientcert_group.add_option('--pass', help='Client private key passphrase.', dest='keypass', default='')
    self._parser.add_option_group(clientcert_group)
    # Input / output
    output_group = OptionGroup(self._parser, 'Input and output options', '')
    # XML output
    output_group.add_option(
        '--xml_out',
        help='Write the scan results as an XML document to the file XML_FILE. If XML_FILE is set to "-", the XML '
             'output will instead be printed to stdout. The corresponding XML Schema Definition is available at '
             './docs/xml_out.xsd',
        dest='xml_file',
        default=None)
    # JSON output
    output_group.add_option(
        '--json_out',
        help='Write the scan results as a JSON document to the file JSON_FILE. If JSON_FILE is set to "-", the '
             'JSON output will instead be printed to stdout. The resulting JSON file is a serialized version of '
             'the ScanResult objects described in SSLyze\'s Python API: the nodes and attributes will be the same. '
             'See https://nabla-c0d3.github.io/sslyze/documentation/available-scan-commands.html for more details.',
        dest='json_file',
        default=None)
    # Read targets from input file
    output_group.add_option(
        '--targets_in',
        help='Read the list of targets to scan from the file TARGETS_IN. It should contain one host:port per '
             'line.',
        dest='targets_in',
        default=None)
    # No text output
    output_group.add_option(
        '--quiet',
        action='store_true',
        dest='quiet',
        help='Do not output anything to stdout; useful when using --xml_out or --json_out.')
    self._parser.add_option_group(output_group)
    # Connectivity option group
    connect_group = OptionGroup(self._parser, 'Connectivity options', '')
    # Connection speed
    connect_group.add_option(
        '--slow_connection',
        help='Greatly reduce the number of concurrent connections initiated by SSLyze. This will make the scans '
             'slower but more reliable if the connection between your host and the server is slow, or if the '
             'server cannot handle many concurrent connections. Enable this option if you are getting a lot of '
             'timeouts or errors.',
        action='store_true',
        dest='slow_connection',
    )
    # HTTP CONNECT Proxy
    connect_group.add_option(
        '--https_tunnel',
        help='Tunnel all traffic to the target server(s) through an HTTP CONNECT proxy. HTTP_TUNNEL should be the '
             'proxy\'s URL: \'http://USER:PW@HOST:PORT/\'. For proxies requiring authentication, only Basic '
             'Authentication is supported.',
        dest='https_tunnel',
        default=None)
    # STARTTLS
    connect_group.add_option(
        '--starttls',
        help='Perform a StartTLS handshake when connecting to the target server(s). '
             '{}'.format(self.START_TLS_USAGE),
        dest='starttls',
        default=None)
    connect_group.add_option(
        '--xmpp_to',
        help='Optional setting for STARTTLS XMPP. XMPP_TO should be the hostname to be put in the \'to\' '
             'attribute of the XMPP stream. Default is the server\'s hostname.',
        dest='xmpp_to',
        default=None)
    # Server Name Indication
    connect_group.add_option(
        '--sni',
        help='Use Server Name Indication to specify the hostname to connect to. Will only affect TLS 1.0+ '
             'connections.',
        dest='sni',
        default=None)
    self._parser.add_option_group(connect_group)
|
def _load_json_config(self):
    """Parse the configuration file contents as JSON.

    :rtype: dict
    :raises ValueError: if the configuration file is not valid JSON.
    """
    try:
        return json.loads(self._read_config())
    except ValueError as err:
        # Re-raise with a friendlier message, preserving the parser detail.
        raise ValueError('Could not read configuration file: {}'.format(err))
|
def replace_apply_state(meta_graph, state_ops, feed_map):
    """Replaces state ops with non state Placeholder ops for the apply graph."""
    for node in meta_graph.graph_def.node:
        tensor_name = node.name + ":0"
        # Only rewire nodes that are state ops and are due to be fed.
        if node.op not in state_ops or tensor_name not in feed_map:
            continue
        node.op = "Placeholder"
        # Placeholder only needs shape and dtype; drop every other attribute.
        stale_keys = [key for key in node.attr if key != "shape"]
        for key in stale_keys:
            del node.attr[key]
        node.attr["dtype"].type = types_pb2.DT_RESOURCE
|
def setup_logging(**kwargs):
    # type: (Any) -> None
    """Setup logging configuration.

    Exactly one logging configuration source and at most one SMTP
    configuration source may be supplied; providing more than one of
    either raises ``LoggingError``.  SMTP customisations are only valid
    together with the packaged default logging configuration.

    Args:
        **kwargs: See below
            logging_config_dict (dict): Logging configuration dictionary OR
            logging_config_json (str): Path to JSON Logging configuration OR
            logging_config_yaml (str): Path to YAML Logging configuration.
                Defaults to internal logging_configuration.yml.
            smtp_config_dict (dict): Email Logging configuration dictionary
                if using default logging configuration OR
            smtp_config_json (str): Path to JSON Email Logging configuration
                if using default logging configuration OR
            smtp_config_yaml (str): Path to YAML Email Logging configuration
                if using default logging configuration

    Returns:
        None
    """
    # --- Resolve the (at most one) SMTP configuration source -------------
    smtp_config_found = False
    smtp_config_dict = kwargs.get('smtp_config_dict', None)
    if smtp_config_dict:
        smtp_config_found = True
        print('Loading smtp configuration customisations from dictionary')
    smtp_config_json = kwargs.get('smtp_config_json', '')
    if smtp_config_json:
        if smtp_config_found:
            raise LoggingError('More than one smtp configuration file given!')
        smtp_config_found = True
        print('Loading smtp configuration customisations from: %s' % smtp_config_json)
        smtp_config_dict = load_json(smtp_config_json)
    smtp_config_yaml = kwargs.get('smtp_config_yaml', '')
    if smtp_config_yaml:
        if smtp_config_found:
            raise LoggingError('More than one smtp configuration file given!')
        smtp_config_found = True
        print('Loading smtp configuration customisations from: %s' % smtp_config_yaml)
        smtp_config_dict = load_yaml(smtp_config_yaml)
    # Base SMTP logging config; only populated when defaulting (see below).
    logging_smtp_config_dict = None
    # --- Resolve the (at most one) logging configuration source ----------
    logging_config_found = False
    logging_config_dict = kwargs.get('logging_config_dict', None)
    if logging_config_dict:
        logging_config_found = True
        print('Loading logging configuration from dictionary')
    logging_config_json = kwargs.get('logging_config_json', '')
    if logging_config_json:
        if logging_config_found:
            raise LoggingError('More than one logging configuration file given!')
        logging_config_found = True
        print('Loading logging configuration from: %s' % logging_config_json)
        logging_config_dict = load_json(logging_config_json)
    logging_config_yaml = kwargs.get('logging_config_yaml', '')
    if logging_config_found:
        # A dict/JSON source was already chosen; a YAML path on top is an error.
        if logging_config_yaml:
            raise LoggingError('More than one logging configuration file given!')
    else:
        if not logging_config_yaml:
            # No explicit source at all: fall back to the packaged default.
            print('No logging configuration parameter. Using default.')
            logging_config_yaml = script_dir_plus_file('logging_configuration.yml', setup_logging)
            if smtp_config_found:
                # SMTP customisations overlay the default config only; load
                # the base SMTP logging configuration for the merge below.
                logging_smtp_config_yaml = script_dir_plus_file('logging_smtp_configuration.yml', setup_logging)
                print('Loading base SMTP logging configuration from: %s' % logging_smtp_config_yaml)
                logging_smtp_config_dict = load_yaml(logging_smtp_config_yaml)
        print('Loading logging configuration from: %s' % logging_config_yaml)
        logging_config_dict = load_yaml(logging_config_yaml)
    if smtp_config_found:
        if logging_smtp_config_dict:
            # Later dictionaries win: base config < SMTP base < user SMTP.
            logging_config_dict = merge_dictionaries([logging_config_dict, logging_smtp_config_dict, smtp_config_dict])
        else:
            # SMTP options were given alongside a custom logging config.
            raise LoggingError('SMTP logging configuration file given but not using default logging configuration!')
    # LOG_FILE_ONLY=<truthy> drops the console handler from the root logger.
    file_only = os.getenv('LOG_FILE_ONLY')
    if file_only is not None and file_only.lower() not in ['false', 'f', 'n', 'no', '0']:
        root = logging_config_dict.get('root')
        if root is not None:
            handlers = root.get('handlers', list())
            for i, handler in enumerate(handlers):
                if handler.lower() == 'console':
                    del handlers[i]
                    break
    logging.config.dictConfig(logging_config_dict)
|
def saveWallet(self, wallet, fpath):
    """Save wallet into specified location.

    Returns the canonical path for the ``fpath`` where ``wallet``
    has been stored.

    Error cases:
        - ``fpath`` is not inside the keyrings base dir - ValueError raised
        - directory part of ``fpath`` exists and it's not a directory -
          NotADirectoryError raised
        - ``fpath`` exists and it's a directory - IsADirectoryError raised

    :param wallet: wallet to save
    :param fpath: wallet file path, absolute or relative to
        keyrings base dir
    :return: canonical path the wallet was written to (str)
    """
    if not fpath:
        raise ValueError("empty path")

    _fpath = self._normalize(fpath)
    _dpath = _fpath.parent
    try:
        _dpath.relative_to(self._baseDir)
    except ValueError:
        # BUG FIX: message previously read "is not is not relative".
        raise ValueError(
            "path {} is not relative to the keyrings {}".format(
                fpath, self._baseDir))

    self._createDirIfNotExists(_dpath)
    # ensure permissions from the bottom of the directory hierarchy
    while _dpath != self._baseDir:
        self._ensurePermissions(_dpath, self.dmode)
        _dpath = _dpath.parent

    with _fpath.open("w") as wf:
        self._ensurePermissions(_fpath, self.fmode)
        encodedWallet = self.encode(wallet)
        wf.write(encodedWallet)
        logger.debug("stored wallet '{}' in {}".format(wallet.name, _fpath))

    return str(_fpath)
|
def show_hydrophobic(self):
    """Visualizes hydrophobic contacts."""
    hydroph = self.plcomplex.hydrophobic_contacts
    if len(hydroph.bs_ids) == 0:
        # No contacts found: keep the selection defined but empty.
        cmd.select('Hydrophobic-P', 'None')
        return
    self.select_by_ids('Hydrophobic-P', hydroph.bs_ids, restrict=self.protname)
    self.select_by_ids('Hydrophobic-L', hydroph.lig_ids, restrict=self.ligname)
    # Draw a dashed distance object per binding-site/ligand atom pair.
    for pair in hydroph.pairs_ids:
        cmd.select('tmp_bs', 'id %i & %s' % (pair[0], self.protname))
        cmd.select('tmp_lig', 'id %i & %s' % (pair[1], self.ligname))
        cmd.distance('Hydrophobic', 'tmp_bs', 'tmp_lig')
    if self.object_exists('Hydrophobic'):
        cmd.set('dash_gap', 0.5, 'Hydrophobic')
        cmd.set('dash_color', 'grey50', 'Hydrophobic')
|
def send_document(url, data, timeout=10, *args, **kwargs):
    """Helper method to send a document via POST.

    Additional ``*args`` and ``**kwargs`` will be passed on to ``requests.post``.

    :arg url: Full url to send to, including protocol
    :arg data: Dictionary (will be form-encoded), bytes, or file-like object to send in the body
    :arg timeout: Seconds to wait for response (defaults to 10)
    :returns: Tuple of status code (int or None) and error (exception class instance or None)
    """
    logger.debug("send_document: url=%s, data=%s, timeout=%s", url, data, timeout)
    # Default headers; caller-supplied headers are merged on top.
    headers = CaseInsensitiveDict({'User-Agent': USER_AGENT, })
    if "headers" in kwargs:
        headers.update(kwargs.get("headers"))
    kwargs["data"] = data
    kwargs["timeout"] = timeout
    kwargs["headers"] = headers
    try:
        response = requests.post(url, *args, **kwargs)
        logger.debug("send_document: response status code %s", response.status_code)
        return response.status_code, None
    except RequestException as ex:
        # Network-level failure: report the exception instead of a status.
        logger.debug("send_document: exception %s", ex)
        return None, ex
|
def sign_with_privkey(
    digest: bytes,
    privkey: Ed25519PrivateKey,
    global_pubkey: Ed25519PublicPoint,
    nonce: int,
    global_commit: Ed25519PublicPoint,
) -> Ed25519Signature:
    """Create a CoSi signature of `digest` with the supplied private key.

    This function needs to know the global public key and global commitment.
    """
    # Derive the private scalar from the key material.
    key_hash = _ed25519.H(privkey)
    scalar = _ed25519.decodecoord(key_hash)
    # Schnorr-style response: s = r + H(R || A || M) * a (mod l).
    challenge = _ed25519.Hint(global_commit + global_pubkey + digest)
    S = (nonce + challenge * scalar) % _ed25519.l
    return Ed25519Signature(_ed25519.encodeint(S))
|
def mouseMoveEvent(self, event):
    """Determines if a drag is taking place (movement beyond the platform
    start-drag distance) and, if so, initiates a drag carrying the
    pickled item under the drag start position."""
    super(AbstractDragView, self).mouseMoveEvent(event)
    if self.dragStartPosition is None or (event.pos() - self.dragStartPosition).manhattanLength() < QtGui.QApplication.startDragDistance():
        # Not dragging (yet): change cursor to reflect actions for what
        # it is hovering on.
        index = self.indexAt(event.pos())
        cursor = self.model().data(index, CursorRole)
        self.setCursor(cursor)
        return
    # mouse has been dragged past a threshold distance
    index = self.indexAt(self.dragStartPosition)
    if not index.isValid():
        return
    # grab the pixmap first, as it may be cleared from component,
    # and slows GUI due to redraw.
    pixmap = self.grabImage(index)
    # get the item at the dragged index
    selected = self.model().data(index, self.DragRole)
    if selected is None:
        return
    # convert to a bytestream for the mime payload
    bstream = cPickle.dumps(selected)
    mimeData = QtCore.QMimeData()
    mimeData.setData("application/x-protocol", bstream)
    # save this component in case the drag ends not in a droppable region,
    # and we want to return it to its original place
    self.limbo_component = selected
    self.originalPos = index
    drag = QtGui.QDrag(self)
    drag.setMimeData(mimeData)
    # this makes the pixmap half transparent
    painter = QtGui.QPainter(pixmap)
    painter.setCompositionMode(painter.CompositionMode_DestinationIn)
    painter.fillRect(pixmap.rect(), QtGui.QColor(0, 0, 0, 127))
    painter.end()
    drag.setPixmap(pixmap)
    # anchor the drag image at the point where the item was grabbed
    x, y = self.indexXY(index)
    drag.setHotSpot(QtCore.QPoint(event.x() - x, event.y() - y))
    # drag.setHotSpot(QtCore.QPoint(pixmap.width()/2, pixmap.height()/2))
    drag.setPixmap(pixmap)
    # remove the item while it is in flight; restored elsewhere if the
    # drag does not complete in a droppable region
    self.model().removeItem(index)
    result = drag.exec_(QtCore.Qt.MoveAction)
|
def do_mumble(self, args):
    """Mumbles what you tell me to."""
    repetitions = args.repeat or 1
    for _ in range(min(repetitions, self.maxrepeats)):
        words = []
        # Maybe lead with a mumble...
        if random.random() < .33:
            words.append(random.choice(self.MUMBLE_FIRST))
        for word in args.words:
            # ...maybe interject one before each word...
            if random.random() < .40:
                words.append(random.choice(self.MUMBLES))
            words.append(word)
        # ...and maybe trail off with one.
        if random.random() < .25:
            words.append(random.choice(self.MUMBLE_LAST))
        self.poutput(' '.join(words))
|
def load(self, callback=None, errback=None, reload=False):
    """Load record data from the API."""
    if self.data and not reload:
        # Refuse to clobber already-loaded data unless explicitly asked.
        raise RecordException('record already loaded')

    def on_success(result, *args):
        self._parseModel(result)
        # Hand the loaded record to the caller's callback when given.
        return callback(self) if callback else self

    return self._rest.retrieve(self.parentZone.zone, self.domain, self.type,
                               callback=on_success, errback=errback)
|
def get_setupcfg_version():
    """As get_setup_version(), but configure via setup.cfg.

    If your project uses setup.cfg to configure setuptools, and hence has
    at least a "name" key in the [metadata] section, you can
    set the version as follows:

        [metadata]
        name = mypackage
        version = attr: autover.version.get_setup_version2

    If the repository name is different from the package name, specify
    `reponame` as a [tool:autover] option:

        [tool:autover]
        reponame = mypackage

    To ensure git information is included in a git archive, add
    setup.cfg to .gitattributes (in addition to __init__):

        __init__.py export-subst
        setup.cfg export-subst

    Then add the following to setup.cfg:

        [tool:autover.configparser_workaround.archive_commit=$Format:%h$]

    The above being a section heading rather than just a key is
    because setuptools requires % to be escaped with %, or it can't
    parse setup.cfg ... but then git export-subst would not work.
    """
    try:
        import configparser
    except ImportError:
        import ConfigParser as configparser  # python2 (also prevents dict-like access)
    import re
    cfg = "setup.cfg"
    autover_section = 'tool:autover'
    config = configparser.ConfigParser()
    config.read(cfg)
    pkgname = config.get('metadata', 'name')
    # BUG FIX: the previous code called config.get(..., vars={'reponame':
    # pkgname}); entries in ``vars`` take precedence over values from the
    # file, so an explicit [tool:autover] reponame option was always
    # ignored.  Read the option explicitly and fall back to the package
    # name (has_option also works on Python 2's ConfigParser).
    if (autover_section in config.sections()
            and config.has_option(autover_section, 'reponame')):
        reponame = config.get(autover_section, 'reponame')
    else:
        reponame = pkgname
    # hack archive_commit into section heading; see docstring
    archive_commit = None
    archive_commit_key = autover_section + '.configparser_workaround.archive_commit'
    for section in config.sections():
        if section.startswith(archive_commit_key):
            archive_commit = re.match(r".*=\s*(\S*)\s*", section).group(1)
    return get_setup_version(cfg, reponame=reponame, pkgname=pkgname,
                             archive_commit=archive_commit)
|
def _temp_filename(contents):
    """Make a temporary file with `contents`.

    The file will be cleaned up on exit.
    """
    with tempfile.NamedTemporaryFile(prefix='codequalitytmp', delete=False) as fp:
        fp.write(contents)
        name = fp.name
    # Register the file for removal at interpreter exit.
    _files_to_cleanup.append(name)
    return name
|
def _init_equiv(self):
    """Add equivalent GO IDs to go2color, if necessary."""
    already_colored = set(self.go2color)
    usr_go2obj = self.gosubdag.go2obj
    colors_to_add = {}
    for colored_goid, color in self.go2color.items():
        # Ignore colored GO IDs that are not part of the user GO set.
        if colored_goid not in usr_go2obj:
            continue
        goobj = usr_go2obj[colored_goid]
        # A term's alternate IDs plus its primary ID are all equivalent.
        equivalents = goobj.alt_ids.union([goobj.id])
        for new_goid in equivalents.difference(already_colored):
            if new_goid in colors_to_add:
                # pylint: disable=superfluous-parens
                print('**TBD: TWO DIFFERENT COLORS FOR EQUIV GO ID')
            colors_to_add[new_goid] = color
    for goid, color in colors_to_add.items():
        self.go2color[goid] = color
|
def _create_matrix(self, document, dictionary):
    """Creates matrix of shape |unique words| × |sentences| where cells
    contain the number of occurrences of words (rows) in sentences (cols).
    """
    sentences = document.sentences
    words_count = len(dictionary)
    sentences_count = len(sentences)
    if words_count < sentences_count:
        message = (
            "Number of words (%d) is lower than number of sentences (%d). "
            "LSA algorithm may not work properly."
        )
        warn(message % (words_count, sentences_count))

    # create matrix |unique words| × |sentences| filled with zeroes
    matrix = numpy.zeros((words_count, sentences_count))
    for col, sentence in enumerate(sentences):
        # only valid words are counted (not stop-words, ...)
        for word in map(self.stem_word, sentence.words):
            if word in dictionary:
                matrix[dictionary[word], col] += 1
    return matrix
|
def num_samples(self, sr=None):
    """Return the number of samples.

    Args:
        sr (int): Calculate the number of samples with the given
            sampling-rate. If None use the native sampling-rate.

    Returns:
        int: Number of samples
    """
    native_sr = self.sampling_rate
    n = units.seconds_to_sample(self.duration, native_sr)
    if sr is None:
        return n
    # Rescale the count to the requested sampling-rate, rounding up.
    return int(np.ceil(n * (float(sr) / native_sr)))
|
def element_sub_sketch(self, keys=None):
    """Returns the sketch summary for the given set of keys.

    This is only applicable for a sketch summary created from an SArray of
    array or dict type.  For dict SArrays the keys are the dict keys; for
    array SArrays the keys are indexes into the array value.  The keys must
    have been passed to the original summary() call (via
    ``sub_sketch_keys``) in order to be retrievable later.

    Parameters
    ----------
    keys : list of str | str | list of int | int
        The dictionary keys or array indexes to get sub sketches for.
        If not given, retrieve all available sub sketches.

    Returns
    -------
    A dictionary mapping each key (index) to its sketch summary; if a
    single bare key was passed in, that key's sketch is returned directly.
    """
    single_val = False
    if keys is None:
        keys = []
    elif not isinstance(keys, list):
        # Bare key: remember to unwrap the result before returning.
        single_val = True
        keys = [keys]
    if len({type(k) for k in keys}) > 1:
        raise ValueError("All keys should have the same type.")
    with cython_context():
        ret_sketches = self.__proxy__.element_sub_sketch(keys)
        # Verify that every requested key came back from the proxy.
        for key in keys:
            if key not in ret_sketches:
                raise KeyError("Cannot retrieve element sub sketch for key '" + str(key) + "'. Element sub sketch can only be retrieved when the summary object was created using the 'sub_sketch_keys' option.")
        ret = {key: Sketch(_proxy=proxy) for key, proxy in ret_sketches.items()}
    if single_val:
        return ret[keys[0]]
    return ret
|
def fill_A(A, right_eigenvectors):
    """Construct feasible initial guess for transformation matrix A.

    Parameters
    ----------
    A : ndarray
        Possibly non-feasible transformation matrix.
    right_eigenvectors : ndarray
        Right eigenvectors of transition matrix

    Returns
    -------
    A : ndarray
        Feasible transformation matrix.
    """
    num_micro, num_eigen = right_eigenvectors.shape
    feasible = A.copy()
    # The row-sum condition fixes the first column.
    feasible[1:, 0] = -feasible[1:, 1:].sum(axis=1)
    # The maximum condition fixes the first row.
    feasible[0] = -right_eigenvectors[:, 1:].real.dot(feasible[1:]).min(axis=0)
    # Rescale so the matrix lies in the feasible set.
    feasible /= feasible[0].sum()
    return feasible
|
def import_util(imp):
    '''Lazily imports a util (class, function, or variable) from a module,
    given its dotted path as a string.

    @param imp: dotted path such as ``"package.module.attribute"``
    '''
    # Split "package.module.attribute" into module path and attribute name.
    module_path, attribute = imp.rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, attribute)
|
def summarize_mutation_io(name, type, required=False):
    """Return the standard summary dict for mutation inputs and outputs."""
    return {'name': name, 'type': type, 'required': required}
|
def add_child(
    self,
    parent,
    title="",
    level="",
    start_date="",
    end_date="",
    date_expression="",
    notes=None,
):
    """Adds a new resource component parented within `parent`.

    :param str parent: The ID to a resource or a resource component.
    :param str title: A title for the record.
    :param str level: The level of description.
    :param str start_date: Begin date for the record's date subrecord.
    :param str end_date: End date for the record's date subrecord.
    :param str date_expression: Free-text date expression.
    :param list notes: Dicts with optional "type" and "content" keys.
    :return: The ID of the newly-created record.
    """
    # BUG FIX: ``notes`` previously defaulted to a mutable ``[]``; use the
    # None-sentinel idiom to avoid the shared-mutable-default pitfall.
    if notes is None:
        notes = []
    parent_record = self.get_record(parent)
    record_type = self.resource_type(parent)
    repository = parent_record["repository"]["ref"]
    if record_type == "resource":
        resource = parent
    else:
        resource = parent_record["resource"]["ref"]
    new_object = {
        "title": title,
        "level": level,
        "jsonmodel_type": "archival_object",
        "resource": {"ref": resource},
    }
    # Create dates object if any of the date fields is populated
    if date_expression or start_date or end_date:
        date = {
            "jsonmodel_type": "date",
            "date_type": "inclusive",
            "label": "creation",
        }
        if date_expression:
            date["expression"] = date_expression
        if start_date:
            date["begin"] = start_date
        if end_date:
            date["end"] = end_date
        new_object["dates"] = [date]
    new_object["notes"] = []
    for note in notes:
        note_type = note.get("type", "odd")
        # If there is a note, but it's an empty string, skip this;
        # ArchivesSpace doesn't allow subnote content to be empty.
        content = note.get("content")
        if not content:
            continue
        new_note = {
            "jsonmodel_type": "note_multipart",
            "publish": True,
            "subnotes": [
                {"content": content, "jsonmodel_type": "note_text", "publish": True}
            ],
            "type": note_type,
        }
        new_object["notes"].append(new_note)
    # "parent" always refers to an archival_object instance; if this is rooted
    # directly to a resource, leave it out.
    if record_type == "resource_component":
        new_object["parent"] = {"ref": parent}
    return self._post(
        repository + "/archival_objects", data=json.dumps(new_object)
    ).json()["uri"]
|
def is_promisc(ip, fake_bcast="ff:ff:00:00:00:00", **kargs):
    """Try to guess if target is in Promisc mode. The target is provided by its ip."""  # noqa: E501
    # A host in promiscuous mode answers ARP who-has requests addressed to a
    # bogus (non-broadcast) destination MAC; a normal host ignores them.
    probe = Ether(dst=fake_bcast) / ARP(op="who-has", pdst=ip)
    reply = srp1(probe, type=ETH_P_ARP, iface_hint=ip, timeout=1, verbose=0, **kargs)  # noqa: E501
    return reply is not None
|
def deploy(self, target, overwrite=False):
    '''deploy this contract

    :param target: target network name (deployment destination)
    :param overwrite: when True, replace an existing deployment record
        for this contract on this target
    :return: (address, err) -- deployed contract address and None on
        success, or None and an error message/exception on failure
    '''
    name = self.name.replace('<stdin>:', "")
    key = DB.pkey([EZO.DEPLOYED, name, target, self.hash])
    if not target:
        return None, "target network must be set with -t or --target"
    # Account unlock password comes from the environment, if set.
    password = os.environ['EZO_PASSWORD'] if 'EZO_PASSWORD' in os.environ else None
    # see if a deployment already exists for this contract on this target
    if not overwrite:
        res, err = self._ezo.db.get(key)
        if err:
            return None, "ERROR: Contract.deployment() {}".format(err)
        if res:
            return None, "deployment on {} already exists for contract {} use '--overwrite' to force".format(target, self.hash)
    account = self._ezo.w3.toChecksumAddress(get_account(self._ezo.config, target))
    self._ezo.w3.eth.accounts[0] = account
    try:
        u_state = self._ezo.w3.personal.unlockAccount(account, password)
    except Exception as e:
        return None, "unable to unlock account for {} using password".format(account)
    try:
        ct = self._ezo.w3.eth.contract(abi=self.abi, bytecode=self.bin)
        # Estimate gas and pad it slightly to avoid out-of-gas on deploy.
        gas_estimate = ct.constructor().estimateGas()
        h = {'from': account, 'gas': gas_estimate + 1000}
        tx_hash = ct.constructor().transact(h)
        # Block until the deployment transaction is mined.
        tx_receipt = self._ezo.w3.eth.waitForTransactionReceipt(tx_hash)
        address = tx_receipt['contractAddress']
    except Exception as e:
        return None, e
    # finally:
    #     self._ezo.w3.personal.lockAccount(account)
    # Record the deployment details for later lookups.
    d = dict()
    d["contract-name"] = self.name
    d["hash"] = self.hash
    d["tx-hash"] = tx_hash
    d["address"] = address
    d["gas-used"] = tx_receipt["gasUsed"]
    d["target"] = target
    d["timestamp"] = datetime.utcnow()
    # save the deployment information
    try:
        _, err = self._ezo.db.save(key, d, overwrite=overwrite)
        if err:
            return None, err
    except Exception as e:
        return None, e
    return address, None
|
def GetLocation(session=None):
    """Return specified location or if none the default location associated with the provided credentials and alias.

    >>> clc.v2.Account.GetLocation()
    u'WA1'
    """
    if session is not None:
        return session['location']
    # Lazily authenticate to populate the default location.
    if not clc.LOCATION:
        clc.v2.API._Login()
    return clc.LOCATION
|
def faces_to_edges(faces, return_index=False):
    """Given a list of faces (n, 3), return a list of edges (n * 3, 2)

    Parameters
    ----------
    faces : (n, 3) int
        Vertex indices representing faces
    return_index : bool
        If True, also return the source face index of each edge.

    Returns
    -------
    edges : (n * 3, 2) int
        Vertex indices representing edges
    face_index : (n * 3,) int
        Only returned when ``return_index`` is True.
    """
    faces = np.asanyarray(faces)
    # Each face (a, b, c) contributes the directed edges ab, bc, ca.
    edges = faces[:, [0, 1, 1, 2, 2, 0]].reshape((-1, 2))
    if not return_index:
        return edges
    # The reshape keeps edges grouped by face, so the index just repeats.
    face_index = np.tile(np.arange(len(faces)), (3, 1)).T.reshape(-1)
    return edges, face_index
|
def _close_generator(g):
    """PyPy 3's generator ``close`` has a memory-leak bug; until it is
    fixed, terminate generators by throwing GeneratorExit manually."""
    if isinstance(g, generatorwrapper):
        g.close()
        return
    if _get_frame(g) is None:
        # Generator has no live frame (finished or never started).
        return
    try:
        g.throw(GeneratorExit_)
    except (StopIteration, GeneratorExit_):
        return
    else:
        raise RuntimeError("coroutine ignored GeneratorExit")
|
def absent(name, ip):  # pylint: disable=C0103
    '''Ensure that the named host is absent

    name
        The host to remove

    ip
        The ip addr(s) of the host to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
    addrs = ip if isinstance(ip, list) else [ip]
    comments = []
    for addr in addrs:
        if not __salt__['hosts.has_pair'](addr, name):
            # Pair already missing: nothing to change for this address.
            ret['result'] = True
            comments.append('Host {0} ({1}) already absent'.format(name, addr))
        elif __opts__['test']:
            comments.append('Host {0} ({1}) needs to be removed'.format(name, addr))
        elif __salt__['hosts.rm_host'](addr, name):
            ret['changes'] = {'host': name}
            ret['result'] = True
            comments.append('Removed host {0} ({1})'.format(name, addr))
        else:
            ret['result'] = False
            comments.append('Failed to remove host')
    ret['comment'] = '\n'.join(comments)
    return ret
|
def get_python(cls):
    """Returns the Python and pip versions.

    :return: python version, pip version
    """
    python_version_s = '.'.join(str(part) for part in sys.version_info[:3])
    # Ask pip itself rather than importing it; output looks like
    # "pip 21.0 from ...", so the second token is the version.
    # pip_version = pip.__version__
    pip_version = Shell.pip("--version").split()[1]
    return python_version_s, pip_version
|
def _process_dimension_kwargs(direction, kwargs):
    """Process kwargs for AxDimension instances by stripping off the prefix
    for the appropriate direction.

    :param str direction: axis prefix (e.g. 'x', 'y', 'z', 's')
    :param dict kwargs: raw keyword arguments, possibly direction-prefixed
    :return: dict restricted to the accepted keys, prefix removed
    """
    acceptable_keys = ['unit', 'pad', 'lim', 'label']
    # if direction in ['s']:
    #     acceptable_keys += ['mode']
    processed_kwargs = {}
    for key, value in kwargs.items():
        if key.startswith(direction):
            # BUG FIX: the previous code used key.lstrip(direction), which
            # strips *any* leading run of the prefix's characters (e.g.
            # 'ppad'.lstrip('p') -> 'ad'); slice the prefix off exactly once.
            processed_key = key[len(direction):]
        else:
            processed_key = key
        if processed_key in acceptable_keys:
            processed_kwargs[processed_key] = value
    return processed_kwargs
|
def is_unitless(ds, variable):
    '''Returns true if the variable is unitless

    Note units of '1' are considered whole numbers or parts but still represent
    physical units and not the absence of units.

    :param netCDF4.Dataset ds: An open netCDF dataset
    :param str variable: Name of the variable
    '''
    # A missing units attribute or an empty string both mean "no units".
    units = getattr(ds.variables[variable], 'units', None)
    return units in (None, '')
|
def set_ghost_file(self, ghost_file):
    """Sets ghost RAM file

    :ghost_file: path to ghost file
    """
    # Quote the path so spaces/special characters survive the hypervisor CLI.
    command = 'vm set_ghost_file "{name}" {ghost_file}'.format(
        name=self._name, ghost_file=shlex.quote(ghost_file))
    yield from self._hypervisor.send(command)
    log.info('Router "{name}" [{id}]: ghost file set to {ghost_file}'.format(
        name=self._name, id=self._id, ghost_file=ghost_file))
    self._ghost_file = ghost_file
|
def _set_port(self, v, load=False):
    """Setter method for port, mapped from YANG variable /system_monitor/port (container)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_port is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_port() directly.
    """
    if hasattr(v, "_utype"):
        # Unwrap a typed value into its base representation first.
        v = v._utype(v)
    try:
        # Wrap the value in the generated YANG container type; raises on
        # values that are not compatible with the container schema.
        t = YANGDynClass(v, base=port.port, is_container='container', presence=True, yang_name="port", rest_name="port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Port Monitoring', u'cli-suppress-no': None, u'cli-add-mode': None, u'cli-full-command': None, u'callpoint': u'PortCrcMonitoring', u'cli-mode-name': u'config-sys-mon-port'}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({'error-string': """port must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=port.port, is_container='container', presence=True, yang_name="port", rest_name="port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Port Monitoring', u'cli-suppress-no': None, u'cli-add-mode': None, u'cli-full-command': None, u'callpoint': u'PortCrcMonitoring', u'cli-mode-name': u'config-sys-mon-port'}}, namespace='urn:brocade.com:mgmt:brocade-system-monitor', defining_module='brocade-system-monitor', yang_type='container', is_config=True)""", })
    self.__port = t
    if hasattr(self, '_set'):
        # Notify the containing object that a child value changed.
        self._set()
|
def load_checkpoint(prefix, epoch):
    """Load model checkpoint from file.

    Parameters
    ----------
    prefix : str
        Prefix of model name.
    epoch : int
        Epoch number of model we would like to load.

    Returns
    -------
    symbol : Symbol
        The symbol configuration of computation network.
    arg_params : dict of str to NDArray
        Model parameter, dict of name to NDArray of net's weights.
    aux_params : dict of str to NDArray
        Model parameter, dict of name to NDArray of net's auxiliary states.

    Notes
    -----
    - Symbol will be loaded from ``prefix-symbol.json``.
    - Parameters will be loaded from ``prefix-epoch.params``.
    """
    symbol = sym.load('%s-symbol.json' % prefix)
    save_dict = nd.load('%s-%04d.params' % (prefix, epoch))
    arg_params = {}
    aux_params = {}
    # Saved parameter names are prefixed with 'arg:' or 'aux:'.
    for full_name, value in save_dict.items():
        kind, name = full_name.split(':', 1)
        if kind == 'arg':
            arg_params[name] = value
        elif kind == 'aux':
            aux_params[name] = value
    return (symbol, arg_params, aux_params)
|
def run_powerflow_onthefly(components, components_data, grid, export_pypsa_dir=None, debug=False):
    """Run powerflow to test grid stability

    Two cases are defined to be tested here:
    i) load case
    ii) feed-in case

    Parameters
    ----------
    components: dict of pandas.DataFrame
    components_data: dict of pandas.DataFrame
    grid: grid object that bus/line results are assigned to
    export_pypsa_dir: str
        Sub-directory in output/debug/grid/ where csv Files of PyPSA network
        are exported to. Export is omitted if argument is empty.
    debug: bool
        If True, run a data integrity check on the inputs first.
    """
    scenario = cfg_ding0.get("powerflow", "test_grid_stability_scenario")
    start_hour = cfg_ding0.get("powerflow", "start_hour")
    end_hour = cfg_ding0.get("powerflow", "end_hour")
    # choose temp_id
    temp_id_set = 1
    # Two snapshots: one for the load case, one for the feed-in case.
    timesteps = 2
    start_time = datetime(1970, 1, 1, 00, 00, 0)
    resolution = 'H'
    # inspect grid data for integrity
    if debug:
        data_integrity(components, components_data)
    # define investigated time range
    timerange = DatetimeIndex(freq=resolution, periods=timesteps, start=start_time)
    # TODO: Instead of hard coding PF config, values from class PFConfigDing0 can be used here.
    # create PyPSA powerflow problem
    network, snapshots = create_powerflow_problem(timerange, components)
    # import pq-sets for loads and generators
    for key in ['Load', 'Generator']:
        for attr in ['p_set', 'q_set']:
            # catch MV grid districts without generators
            if not components_data[key].empty:
                series = transform_timeseries4pypsa(components_data[key][attr].to_frame(), timerange, column=attr)
                import_series_from_dataframe(network, series, key, attr)
    # import voltage set points for buses
    series = transform_timeseries4pypsa(components_data['Bus']['v_mag_pu_set'].to_frame(), timerange, column='v_mag_pu_set')
    import_series_from_dataframe(network, series, 'Bus', 'v_mag_pu_set')
    # add coordinates to network nodes and make ready for map plotting
    # network = add_coordinates(network)
    # start powerflow calculations
    network.pf(snapshots)
    # # make a line loading plot
    # # TODO: make this optional
    # plot_line_loading(network, timestep=0,
    #                   filename='Line_loading_load_case.png')
    # plot_line_loading(network, timestep=1,
    #                   filename='Line_loading_feed-in_case.png')
    # process results
    bus_data, line_data = process_pf_results(network)
    # assign results data to graph
    assign_bus_results(grid, bus_data)
    assign_line_results(grid, line_data)
    # export network if directory is specified
    if export_pypsa_dir:
        export_to_dir(network, export_dir=export_pypsa_dir)
|
def add(self, model):
    """Register one or more indexing algorithms.

    When several algorithms are registered, the union of the record
    pairs produced by each algorithm is taken.

    Parameters
    ----------
    model : list or class
        A single index algorithm, or a list of them, from
        :mod:`recordlinkage.index`.
    """
    if not isinstance(model, list):
        # Single algorithm: mutate the existing list in place.
        self.algorithms.append(model)
    else:
        # List of algorithms: concatenate (rebinds to a new list).
        self.algorithms = self.algorithms + model
|
def use_trump_data(self, symbols):
    """Build the conversion table from trump symbol data.

    Parameters
    ----------
    symbols : list
        Symbol objects; each symbol's units label is mapped to the
        column of its dataframe named after the symbol.
    """
    frames = {}
    for symbol in symbols:
        # Key by units; a later symbol with the same units wins,
        # matching dict-comprehension semantics.
        frames[symbol.units] = symbol.df[symbol.name]
    self.build_conversion_table(frames)
|
def match_path(entry, opts):
    """Return True if `entry` passes the `match` and `exclude` options.

    Entries whose name is in ALWAYS_OMIT are always rejected.  `match`
    only applies to files; `exclude` applies to anything that matched.
    Both options, when present, must be lists of fnmatch patterns.
    """
    if entry.name in ALWAYS_OMIT:
        return False
    # TODO: currently we use fnmatch syntax and match against names.
    # We also might allow glob syntax and match against the whole
    # relative path instead (entry.get_rel_path()).
    path = entry.name
    match = opts.get("match")
    exclude = opts.get("exclude")
    ok = True
    if entry.is_file() and match:
        assert type(match) is list
        ok = any(fnmatch.fnmatch(path, pattern) for pattern in match)
    if ok and exclude:
        assert type(exclude) is list
        if any(fnmatch.fnmatch(path, pattern) for pattern in exclude):
            ok = False
    return ok
|
def concrete_descendents(parentclass):
    """Map class name -> class for all non-abstract subclasses of
    `parentclass` (including `parentclass` itself).

    Only classes already imported/defined at call time are included, so
    callers will usually first do ``from package import *``.
    """
    return {
        klass.__name__: klass
        for klass in descendents(parentclass)
        if not _is_abstract(klass)
    }
|
def parse(requirements):
    """Parse the given requirements text line by line.

    Blank / unparseable lines (falsy `_parse` results) are dropped;
    the remaining parse results are run through RTransformer.
    Returns a lazy ``map`` object.
    """
    transformer = RTransformer()
    raw = (_parse(line) for line in requirements.splitlines())
    return map(transformer.transform, filter(None, raw))
|
def txt(self, txt, h=None, at_x=None, to_x=None, change_style=None, change_size=None):
    """Print a string at a defined horizontal position.

    `at_x` places the text at that x position, left-aligned.
    `to_x` applies only when `at_x` is None; it forces right alignment
    with the cell spanning from 0 to `to_x`.
    `h` defaults to self.height (falsy h also falls back, as before).
    """
    height = h or self.height
    self._change_props(change_style, change_size)
    align = 'L'
    width = None
    if at_x is not None:
        self.oPdf.set_x(at_x)
    elif to_x is not None:
        # Right-align inside a cell spanning [0, to_x].
        align = 'R'
        self.oPdf.set_x(0)
        width = to_x
    if width is None:
        width = self.oPdf.get_string_width(txt)
    self.oPdf.cell(width, h=height, txt=txt, align=align)
|
def setParams(self, estimator=None, estimatorParamMaps=None, evaluator=None, trainRatio=0.75, parallelism=1, collectSubModels=False, seed=None):
    """setParams(self, estimator=None, estimatorParamMaps=None, evaluator=None, trainRatio=0.75, parallelism=1, collectSubModels=False, seed=None):
    Sets params for the train validation split."""
    # _input_kwargs holds exactly the keyword arguments the caller
    # supplied (presumably populated by a @keyword_only decorator --
    # confirm against the enclosing class).
    return self._set(**self._input_kwargs)
|
def to_json(self):
    """Return the JSON-serializable dict for this space membership,
    extending the parent representation with admin and roles."""
    payload = super(SpaceMembership, self).to_json()
    payload['admin'] = self.admin
    payload['roles'] = self.roles
    return payload
|
def is_installed(self):
    """Check whether the tool is installed.

    Returns
    -------
    bool
        True when the tool is configured and its binary is executable.
    """
    configured = self.is_configured()
    if not configured:
        # Return the falsy value itself, exactly like `x and y` would.
        return configured
    return os.access(self.bin(), os.X_OK)
|
def import_locations(self, gpx_file):
    """Import GPX data files.

    Reads waypoints from GPX 1.1 XML (as specified by the GPX 1.1
    Schema Documentation) via :mod:`ElementTree`-based objectified
    parsing and appends a :class:`~gpx.Waypoint` per ``<wpt>`` element.
    Optional child elements (name, desc, ele, time) default to None
    when absent.

    Args:
        gpx_file (iter): GPX data to read
    Returns:
        list: Locations with optional comments
    """
    self._gpx_file = gpx_file
    data = utils.prepare_xml_read(gpx_file, objectify=True)
    try:
        self.metadata.import_metadata(data.metadata)
    except AttributeError:
        # No <metadata> element present.
        pass

    def _maybe(extract):
        # Run the extractor; a missing element raises AttributeError,
        # which we translate to None (other errors still propagate).
        try:
            return extract()
        except AttributeError:
            return None

    for waypoint in data.wpt:
        latitude = waypoint.get('lat')
        longitude = waypoint.get('lon')
        name = _maybe(lambda: waypoint.name.text)
        description = _maybe(lambda: waypoint.desc.text)
        elevation = _maybe(lambda: float(waypoint.ele.text))
        time = _maybe(lambda: utils.Timestamp.parse_isoformat(waypoint.time.text))
        self.append(Waypoint(latitude, longitude, name, description, elevation, time))
|
def avail_locations(call=None):
    '''Return the list of datacenter locations available to this account.'''
    credentials = get_creds()
    # Authenticate against the CenturyLink Cloud v1 API before querying.
    clc.v1.SetCredentials(credentials["token"], credentials["token_pass"])
    return clc.v1.Account.GetLocations()
|
def maybe_download(directory, filename, uri):
    """Download filename from uri unless it's already in directory.

    Copies a remote file to local if that local file does not already
    exist.  If the local file pre-exists this function call, it does not
    check that the local file is a copy of the remote.

    Remote filenames can be filepaths, any URI readable by
    tensorflow.gfile, or a URL.

    Args:
        directory: path to the directory that will be used.
        filename: name of the file to download to (do nothing if it
            already exists).
        uri: URI to copy (or download) from.

    Returns:
        The path to the downloaded file.

    Raises:
        ValueError: if gfile cannot copy the URI and it is not an
            http(s) URL either.
    """
    tf.gfile.MakeDirs(directory)
    filepath = os.path.join(directory, filename)
    if tf.gfile.Exists(filepath):
        tf.logging.info("Not downloading, file already found: %s" % filepath)
        return filepath
    tf.logging.info("Downloading %s to %s" % (uri, filepath))
    try:
        tf.gfile.Copy(uri, filepath)
    except tf.errors.UnimplementedError:
        # gfile cannot handle this scheme; fall back to plain HTTP(S).
        if uri.startswith("http"):
            # Download to a temp name first so an interrupted download
            # never leaves a partial file at the final path.
            inprogress_filepath = filepath + ".incomplete"
            inprogress_filepath, _ = urllib.urlretrieve(uri, inprogress_filepath, reporthook=download_report_hook)
            # Print newline to clear the carriage return from the download progress
            print()
            tf.gfile.Rename(inprogress_filepath, filepath)
        else:
            # Bug fix: report the unrecognized *uri*, not the local
            # destination path, so the error points at the real culprit.
            raise ValueError("Unrecognized URI: " + uri)
    statinfo = os.stat(filepath)
    tf.logging.info("Successfully downloaded %s, %s bytes." % (filename, statinfo.st_size))
    return filepath
|
def cudnnCreatePoolingDescriptor():
    """Create a pooling descriptor.

    Allocates the memory needed to hold the opaque cuDNN pooling
    descriptor structure.

    Returns
    -------
    poolingDesc : cudnnPoolingDescriptor
        Newly allocated pooling descriptor (opaque handle value).
    """
    handle = ctypes.c_void_p()
    # The C API writes the new descriptor pointer into `handle`.
    status = _libcudnn.cudnnCreatePoolingDescriptor(ctypes.byref(handle))
    cudnnCheckStatus(status)
    return handle.value
|
def write_cell(self, x, y, value):
    """Write `value` into the cell at 0-based position (x, y).

    :param x: zero-based row index
    :param y: zero-based column index
    :param value: value to be written
    :return: None
    """
    # The underlying sheet API is 1-indexed, callers pass 0-based coords.
    self._sheet.update_cell(x + 1, y + 1, value)
|
def get_timesheet_collection_for_context(ctx, entries_file=None):
    """Return a :class:`~taxi.timesheet.TimesheetCollection` for the
    current timesheet(s).

    The entries-file path, number of previous files, date format and
    flags all come from the settings object on the click context.
    If `entries_file` is given (truthy), it overrides the settings path.
    """
    settings = ctx.obj['settings']
    if not entries_file:
        entries_file = settings.get_entries_file_path(False)
    parser = TimesheetParser(
        date_format=settings['date_format'],
        add_date_to_bottom=settings.get_add_to_bottom(),
        flags_repr=settings.get_flags(),
    )
    return TimesheetCollection.load(entries_file, settings['nb_previous_files'], parser)
|
def update_pass(user_id, newpass):
    '''Update the password of a user.

    The new password is stored as an MD5 digest.  Returns a result dict
    with 'success' True and status code '00'.
    '''
    query = TabMember.update(user_pass=tools.md5(newpass)).where(TabMember.uid == user_id)
    query.execute()
    return {'success': True, 'code': '00'}
|
def prevSolarReturn(date, lon):
    """Return the previous date when the Sun is at longitude `lon`."""
    # eph works in Julian days; convert back preserving the UTC offset.
    return Datetime.fromJD(eph.prevSolarReturn(date.jd, lon), date.utcoffset)
|
def subscribe(self, stream):
    """Subscribe to a stream.

    :param stream: stream to subscribe to
    :type stream: str
    :raises: :class:`~datasift.exceptions.StreamSubscriberNotStarted`,
        :class:`~datasift.exceptions.DeleteRequired`,
        :class:`~datasift.exceptions.StreamNotConnected`

    Used as a decorator, e.g.::

        @client.subscribe(stream)
        def subscribe_to_hash(msg):
            print(msg)
    """
    if not self._stream_process_started:
        raise StreamSubscriberNotStarted()

    def real_decorator(func):
        # ToS compliance: delete messages must be handled by the client.
        if not self._on_delete:
            raise DeleteRequired("""An on_delete function is required. You must process delete messages and remove
them from your system (if stored) in order to remain compliant with the ToS""")
        if hasattr(self.factory, 'datasift') and 'send_message' in self.factory.datasift:  # pragma: no cover
            self.subscriptions[stream] = func
            payload = json.dumps({"action": "subscribe", "hash": stream}).encode("utf8")
            self.factory.datasift['send_message'](payload)
        else:  # pragma: no cover
            raise StreamNotConnected('The client is not connected to DataSift, unable to subscribe to stream')
        # NOTE(review): the decorated name is rebound to None (nothing is
        # returned here); the callback is only reachable via
        # self.subscriptions -- confirm this is intentional.

    return real_decorator
|
def get_error_page(self, loadbalancer):
    """Return the error page of a load balancer.

    Load balancers all have a default error page that is shown to an
    end user who is attempting to access a node that is
    offline/unavailable.
    """
    uri = "/loadbalancers/%s/errorpage" % utils.get_id(loadbalancer)
    # Only the parsed body is interesting; the raw response is discarded.
    _, body = self.api.method_get(uri)
    return body
|
def getDate(self):
    "returns the GMT response datetime or None"
    raw = self.headers.get('date')
    if raw:
        return self.convertTimeString(raw)
    # Missing or falsy header: return it unchanged (None or empty).
    return raw
|
def convert_dms_to_dd(degree_num, degree_den, minute_num, minute_den, second_num, second_den):
    """Convert degree/minute/second formatted GPS data (as rational
    numerator/denominator pairs, e.g. from EXIF) to decimal degrees.

    @param degree_num: numerator of the degree value.
    @param degree_den: denominator of the degree value.
    @param minute_num: numerator of the minute value.
    @param minute_den: denominator of the minute value.
    @param second_num: numerator of the second value.
    @param second_den: denominator of the second value.
    @return: a decimal degree float.
    """
    degrees = float(degree_num) / float(degree_den)
    arc_minutes = float(minute_num) / float(minute_den)
    arc_seconds = float(second_num) / float(second_den)
    # 60 minutes per degree, 3600 seconds per degree.
    return degrees + arc_minutes / 60 + arc_seconds / 3600
|
def args_priority(args, environ):
    '''Resolve the Slack token, as_user flag and channel.

    Priority of token:
        1) the -t command-line argument (args.token)
        2) the SLACK_TOKEN environment variable
        otherwise None.
    Priority of as_user:
        1) the -a command-line argument (args.as_user)
        2) the SLACK_AS_USER environment variable (any non-empty value
           counts as True).

    args: parsed CLI arguments with .token, .as_user and .channel
    environ: mapping of environment variables (e.g. os.environ)
    Returns a (token, as_user, channel) tuple.
    '''
    # dict.get replaces the 'in environ.keys()' membership test plus
    # explicit else-None branch of the original.
    token = environ.get('SLACK_TOKEN')
    if args.token:
        token = args.token
    # slack as_user
    as_user = bool(environ.get('SLACK_AS_USER'))
    if args.as_user:
        as_user = True
    return token, as_user, args.channel
|
def ask_for_confirm_with_message(cls, ui, prompt='Do you agree?', message='', **options):
    """Return True if the user agrees, False otherwise.

    NOTE(review): **options is accepted but silently discarded -- confirm
    whether it should be forwarded to the helper.
    """
    helper = cls.get_appropriate_helper(ui)
    return helper.ask_for_confirm_with_message(prompt, message)
|
def fetcher_with_object(cls, parent_object, relationship="child"):
    """Register a fetcher for a served object.

    Creates a fetcher instance (filled with `managed_class` instances),
    wires it to `parent_object`, and registers it under the managed
    object's REST name.

    Args:
        parent_object: the instance of the parent object to serve
        relationship: relationship label stored on the fetcher
    Returns:
        The fetcher instance.
    """
    instance = cls()
    instance.parent_object = parent_object
    instance.relationship = relationship
    parent_object.register_fetcher(instance, cls.managed_object_rest_name())
    return instance
|
def ra(self, *args, **kwargs):
    """NAME:
       ra
    PURPOSE:
       return the right ascension
    INPUT:
       t - (optional) time at which to get ra (can be Quantity)
       obs=[X,Y,Z] - (optional) position of observer (in kpc; entries
          can be Quantity) (default=[8.0,0.,0.]) OR Orbit object that
          corresponds to the orbit of the observer
          (default=Object-wide default; can be Quantity)
          Y is ignored and always assumed to be zero
       ro= (Object-wide default) physical scale for distances to use to
          convert (can be Quantity)
    OUTPUT:
       ra(t) in deg
    HISTORY:
       2011-02-23 - Written - Bovy (NYU)
    """
    values = self._orb.ra(*args, **kwargs)
    # Unwrap single-element results to a scalar, as callers expect.
    return values[0] if len(values) == 1 else values
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.