signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def api_upload(service, encData, encMeta, keys):
    '''Uploads data to Send.

    Caution! Data is uploaded as given; this function will not encrypt it
    for you.

    :param service: base URL of the Send service (trailing slash expected)
    :param encData: already-encrypted file content to upload
    :param encMeta: already-encrypted metadata blob
    :param keys: key container providing ``authKey`` and ``secretKey``
    :returns: tuple ``(secretUrl, fileId, fileNonce, owner_token)``
    '''
    service += 'api/upload'
    files = requests_toolbelt.MultipartEncoder(
        fields={'file': ('blob', encData, 'application/octet-stream')})
    pbar = progbar(files.len)
    # The monitor callback reports only the delta since the last update,
    # since pbar.update() adds to (rather than sets) the progress count.
    monitor = requests_toolbelt.MultipartEncoderMonitor(
        files, lambda files: pbar.update(monitor.bytes_read - pbar.n))
    headers = {
        'X-File-Metadata': unpadded_urlsafe_b64encode(encMeta),
        'Authorization': 'send-v1 ' + unpadded_urlsafe_b64encode(keys.authKey),
        'Content-type': monitor.content_type,
    }
    r = requests.post(service, data=monitor, headers=headers, stream=True)
    r.raise_for_status()
    pbar.close()
    body_json = r.json()
    secretUrl = body_json['url'] + '#' + unpadded_urlsafe_b64encode(keys.secretKey)
    fileId = body_json['id']
    fileNonce = unpadded_urlsafe_b64decode(
        r.headers['WWW-Authenticate'].replace('send-v1 ', ''))
    # Older servers report the owner token under 'delete' instead of 'owner'.
    # Catch only the missing-key case: the original bare `except` would also
    # have swallowed unrelated errors such as KeyboardInterrupt.
    try:
        owner_token = body_json['owner']
    except KeyError:
        owner_token = body_json['delete']
    return secretUrl, fileId, fileNonce, owner_token
def get_git_cleaned_branch_name(path):
    """Get the git branch name of the current HEAD in path, scrubbed to
    conform to PEP 440.

    PEP 440 local version identifiers may only contain ASCII letters
    ([a-zA-Z]), ASCII digits ([0-9]) and periods (.).
    https://www.python.org/dev/peps/pep-0440/#local-version-identifiers

    Parameters
    ----------
    path : str
        The path to run git commands in.
    """
    # Name of the current branch (or 'HEAD' for a detached HEAD).
    raw_name = run_cmd(path, 'git', 'rev-parse', '--abbrev-ref', 'HEAD')
    # Collapse every run of disallowed characters into a single period.
    return re.sub(r"[^A-Za-z0-9]+", ".", raw_name.strip())
def put_watch(self, id, body=None, params=None):
    """`<http://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-put-watch.html>`_

    :arg id: Watch ID
    :arg body: The watch
    :arg active: Specify whether the watch is in/active by default
    :arg if_primary_term: only update the watch if the last operation that
        has changed the watch has the specified primary term
    :arg if_seq_no: only update the watch if the last operation that has
        changed the watch has the specified sequence number
    :arg version: Explicit version number for concurrency control
    """
    # The watch id is a mandatory path component; reject sentinel values.
    if id in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument 'id'.")
    endpoint = _make_path("_watcher", "watch", id)
    return self.transport.perform_request("PUT", endpoint, params=params, body=body)
def find_dates(text, source=False, index=False, strict=False, base_date=None):
    """Extract datetime strings from text.

    :param text: a string containing one or more natural language or
        literal datetime strings
    :type text: str | unicode
    :param source: also return the original string segment
    :type source: boolean
    :param index: also return the indices of the datetime string in text
    :type index: boolean
    :param strict: only return datetimes with complete date information;
        e.g. `July 2016` or `Monday` will not yield datetimes, while
        `May 16, 2015` will
    :type strict: boolean
    :param base_date: default base datetime used when parsing incomplete dates
    :type base_date: datetime
    :return: generator of :mod:`datetime.datetime` objects, or tuples with
        the source text and index if requested
    """
    finder = DateFinder(base_date=base_date)
    return finder.find_dates(text, source=source, index=index, strict=strict)
def TeArraySizeCheck(self):
    """Checks that Te and q0 array sizes are compatible
    For finite difference solution.

    Exits the process via sys.exit() on a shape mismatch; otherwise
    optionally prints a confirmation when self.Debug is set.
    """
    # Only if they are both defined and are arrays
    # Both being arrays is a possible bug in this check routine that I have
    # intentionally introduced
    if type(self.Te) == np.ndarray and type(self.qs) == np.ndarray:  # Doesn't touch non-arrays or 1D arrays
        # NOTE(review): this inner test repeats part of the outer condition,
        # so the `else` branch (the Debug confirmation below) is unreachable
        # whenever this point is reached -- presumably part of the bug
        # mentioned above; confirm before changing.
        if type(self.Te) is np.ndarray:
            if (np.array(self.Te.shape) != np.array(self.qs.shape)).any():
                sys.exit("q0 and Te arrays have incompatible shapes. Exiting.")
        else:
            if self.Debug:
                print("Te and qs array sizes pass consistency check")
def assembly(self, value):
    """The assembly property setter.

    Args:
        value (string): the property value. Setting the default value
        removes any stored override; any other value is stored.
    """
    is_default = value == self._defaults['assembly']
    if is_default and 'assembly' in self._values:
        # Reverting to the default: drop the stored override.
        self._values.pop('assembly')
    else:
        self._values['assembly'] = value
def weighted_pairwise_distances(X, w, metric='euclidean', w_pow=0.5):
    r"""Given a feature matrix ``X`` with weights ``w``, calculates the modified
    distance metric :math:`\tilde{d}(p, q) = d(p, q) / (w(p) w(q) N^2)^p`, where
    :math:`N` is the length of ``X``. This metric is such that "heavy" feature
    vectors are considered to be closer to each other than "light" feature
    vectors, and are hence correspondingly less likely to be considered part of
    the same cluster.
    """
    if sklearn is None:
        raise ImportError("This function requires scikit-learn.")
    distances = sklearn.metrics.pairwise.pairwise_distances(X, metric=metric)
    n_samples = w.shape[0]
    # Pairwise weight products, scaled by N^2 so the metric is dimensionless.
    weights = outer_product(w) * n_samples ** 2
    return distances / weights ** w_pow
def get_path_type(path_):
    r"""returns if a path is a file, directory, link, or mount"""
    # A path may satisfy several predicates at once (e.g. a directory that
    # is also a mount point); the labels are concatenated in a fixed order.
    labels = []
    if isfile(path_):
        labels.append('file')
    if isdir(path_):
        labels.append('directory')
    if islink(path_):
        labels.append('link')
    if ismount(path_):
        labels.append('mount')
    return ''.join(labels)
def topic_subscribers_data_message(self, topic_name=None, condition=None, collapse_key=None, delay_while_idle=False, time_to_live=None, restricted_package_name=None, low_priority=False, dry_run=False, data_message=None, content_available=None, timeout=5, extra_notification_kwargs=None, extra_kwargs=None):
    """Sends data notification to multiple devices subscribed to a topic.

    Args:
        topic_name (topic_name): Name of the topic to deliver messages to.
            A topic name is a string that can be formed with any character
            in [a-zA-Z0-9-_.~%]
        condition (condition): Topic condition to deliver messages to
        data_message (dict): Data message payload to send alone or with the
            notification message

    Keyword Args:
        collapse_key (str, optional): Identifier for a group of messages
            that can be collapsed so that only the last message gets sent
            when delivery can be resumed. Defaults to ``None``.
        delay_while_idle (bool, optional): If ``True`` indicates that the
            message should not be sent until the device becomes active.
        time_to_live (int, optional): How long (in seconds) the message
            should be kept in FCM storage if the device is offline. The
            maximum time to live supported is 4 weeks. Defaults to ``None``
            which uses the FCM default of 4 weeks.
        low_priority (boolean, optional): Whether to send notification with
            the low priority flag. Defaults to ``False``.
        restricted_package_name (str, optional): Package name of the
            application where the registration IDs must match in order to
            receive the message. Defaults to ``None``.
        dry_run (bool, optional): If ``True`` no message will be sent but
            request will be tested.

    Returns:
        :tuple:`multicast_id (long), success (int), failure (int),
        canonical_ids (int), results (list)`: Response from FCM server.

    Raises:
        AuthenticationError: If :attr:`api_key` is not set or provided or
            there is an error authenticating the sender.
        FCMServerError: Internal server error or timeout error on Firebase
            cloud messaging server
        InvalidDataError: Invalid data provided
        InternalPackageError: JSON parsing error, mostly from changes in the
            response of FCM; create a new github issue to resolve it.
    """
    # Default to None instead of the original mutable `{}` default: a dict
    # default is shared across calls (mutations would leak between calls)
    # and also made the `is None` guard below dead code.
    if extra_kwargs is None:
        extra_kwargs = {}
    payload = self.parse_payload(
        topic_name=topic_name,
        condition=condition,
        collapse_key=collapse_key,
        delay_while_idle=delay_while_idle,
        time_to_live=time_to_live,
        restricted_package_name=restricted_package_name,
        low_priority=low_priority,
        dry_run=dry_run,
        data_message=data_message,
        content_available=content_available,
        remove_notification=True,
        extra_notification_kwargs=extra_notification_kwargs,
        **extra_kwargs)
    self.send_request([payload], timeout)
    return self.parse_responses()
def register_incoming_conn(self, conn):
    """Add incoming connection into the heap.

    Wires the connection's pending-change and close callbacks into this
    object's bookkeeping, then triggers one change notification so the
    heap state reflects the new connection immediately.
    """
    assert conn, "conn is required"
    # Register the change callback before inserting, so any state change
    # on the connection is observed from the start.
    conn.set_outbound_pending_change_callback(self._on_conn_change)
    self.connections.appendleft(conn)
    self._set_on_close_cb(conn)
    # Fire once now to account for the newly added connection.
    self._on_conn_change()
def fetch_file(self, in_path, out_path):
    '''save a remote file to the specified path'''
    vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
    # Build, serialize and encrypt the fetch request.
    data = dict(mode='fetch', in_path=in_path)
    data = utils.jsonify(data)
    data = utils.encrypt(self.key, data)
    self.socket.send(data)
    # Receive, decrypt and decode the response payload.
    response = self.socket.recv()
    response = utils.decrypt(self.key, response)
    response = utils.parse_json(response)
    response = response['data']
    response = base64.b64decode(response)
    # Write in binary mode: the payload is raw base64-decoded bytes, and
    # text mode would corrupt binary content (and fail on Python 3).
    # The context manager guarantees the handle is closed even if the
    # write raises.
    with open(out_path, "wb") as fh:
        fh.write(response)
def extract_subsection(im, shape):
    r"""Extracts the middle section of a image

    Parameters
    ----------
    im : ND-array
        Image from which to extract the subsection
    shape : array_like
        Can either specify the size of the extracted section or the
        fractional size of the image to extract.

    Returns
    -------
    image : ND-array
        An ND-array of size given by the ``shape`` argument, taken from the
        center of the image.

    Examples
    --------
    >>> import scipy as sp
    >>> from porespy.tools import extract_subsection
    >>> im = sp.array([[1, 1, 1, 1], [1, 2, 2, 2], [1, 2, 3, 3], [1, 2, 3, 4]])
    >>> im = extract_subsection(im=im, shape=[2, 2])
    >>> print(im)
    [[2 2]
     [2 3]]
    """
    # Interpret a leading value < 1 as a fraction of the full image size.
    shape = sp.array(shape)
    if shape[0] < 1:
        shape = sp.array(im.shape) * shape
    center = sp.array(im.shape) / 2
    slices = []
    for axis in range(im.ndim):
        half = shape[axis] / 2
        # Clamp the window to the image bounds along this axis.
        lo = sp.amax((center[axis] - half, 0))
        hi = sp.amin((center[axis] + half, im.shape[axis]))
        slices.append(slice(int(lo), int(hi)))
    return im[tuple(slices)]
def get_content_type(self, content_type):
    """Get all the items of the given content type related to this item."""
    return self.get_queryset().filter(content_type__name=content_type)
def err_response(reqid, code, msg, data=None):
    """Formats a JSON-RPC error as a dict with keys: 'jsonrpc', 'id', 'error'.

    :param reqid: id of the request being answered (may be None)
    :param code: integer JSON-RPC error code
    :param msg: short human-readable error message
    :param data: optional extra error information. Included whenever it is
        not None: the JSON-RPC 2.0 spec allows any value here, so falsy
        values such as 0, '' or {} must not be silently dropped (the
        original truthiness test did exactly that).
    """
    err = {"code": code, "message": msg}
    if data is not None:
        err["data"] = data
    return {"jsonrpc": "2.0", "id": reqid, "error": err}
def __check_msg_for_headers(self, msg, **email_headers):
    """Checks an Email.Message object for the headers in email_headers.

    Following are acceptable header names: ['Delivered-To',
    'Received', 'Return-Path', 'Received-SPF',
    'Authentication-Results', 'DKIM-Signature',
    'DomainKey-Signature', 'From', 'To', 'Message-ID',
    'Subject', 'MIME-Version', 'Content-Type', 'Date',
    'X-Sendgrid-EID', 'Sender'].

    @Params
    msg - the Email.message object to check
    email_headers - headers to check against (a 'To' entry is required)
    @Returns
    Boolean whether all the headers were found
    """
    # The supplied 'To' header is matched against the received message's
    # 'Delivered-To' header instead; raises KeyError if 'To' is missing.
    email_headers['Delivered-To'] = email_headers.pop('To')
    # Only header *presence* is verified, not the header values.
    return all(header in msg.keys() for header in email_headers)
def fire_lifecycle_event(self, new_state):
    """Called when instance's state changes.

    :param new_state: (Lifecycle State), the new state of the instance.
    """
    if new_state == LIFECYCLE_STATE_SHUTTING_DOWN:
        self.is_live = False
    self.state = new_state
    self.logger.info(self._git_info + "HazelcastClient is %s", new_state, extra=self._logger_extras)
    # Copy the listener collection so callbacks that (un)register listeners
    # do not mutate the dict being iterated.
    for listener in list(self._listeners.values()):
        try:
            listener(new_state)
        # Catch Exception instead of a bare except so that system-exiting
        # exceptions (KeyboardInterrupt, SystemExit) still propagate while
        # a misbehaving listener cannot break state propagation.
        except Exception:
            self.logger.exception("Exception in lifecycle listener", extra=self._logger_extras)
def error(self, msg):
    '''Raise a ConfigParseError at the current input position'''
    if self.finished():
        # No more tokens to point at -- report end-of-input instead.
        raise ConfigParseError("Unexpected end of input; %s" % (msg,))
    raise ConfigParseError("Unexpected token %s; %s" % (self.peek(), msg))
def query(self, sql, *args, **kwargs):
    """Executes an SQL SELECT query and returns rows generator.

    :param sql: query to execute
    :param args: parameters iterable
    :param kwargs: parameters iterable
    :return: rows generator
    :rtype: generator
    """
    # The connection lock is held for the duration of the iteration, since
    # rows are produced lazily while the caller consumes the generator.
    with self.locked() as conn:
        for record in conn.query(sql, *args, **kwargs):
            yield record
def get_user_details(self, response):
    """Complete with additional information from environment, as available."""
    first = response.get(self.ENV_FIRST_NAME, None)
    last = response.get(self.ENV_LAST_NAME, None)
    result = {
        'username': response[self.ENV_USERNAME],
        'email': response.get(self.ENV_EMAIL, None),
        'first_name': first,
        'last_name': last,
    }
    # Only synthesize a full name when both components are present.
    if first and last:
        result['fullname'] = first + ' ' + last
    logger.debug("Returning user details: " + str(result))
    return result
def save(self):
    """Send this object's mutable values to the server in a PUT request.

    Returns True on success, False when the API response reports an error.
    """
    response = self._client.put(type(self).api_endpoint, model=self, data=self._serialize())
    return 'error' not in response
def get_course_list_name(curriculum_abbr, course_number, section_id, quarter, year, joint=False):
    """Return the list address of UW course email list"""
    # Joint courses get a "multi_" prefix on the list name.
    prefix = "multi_" if joint is True else ""
    return COURSE_LIST_NAME.format(
        prefix=prefix,
        curr_abbr=_get_list_name_curr_abbr(curriculum_abbr),
        course_no=course_number,
        section_id=section_id.lower(),
        quarter=quarter.lower()[:2],
        year=str(year)[-2:],
    )
def ccifrm(frclss, clssid, lenout=_default_len_out):
    """Return the frame name, frame ID, and center associated with
    a given frame class and class ID.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ccifrm_c.html

    :param frclss: Class of frame.
    :type frclss: int
    :param clssid: Class ID of frame.
    :type clssid: int
    :param lenout: Maximum length of output string.
    :type lenout: int
    :return: the frame ID, the frame name, the center, and a found flag
        indicating whether a matching frame was located.
    :rtype: tuple
    """
    # Convert the Python arguments to ctypes values for the C call.
    frclss = ctypes.c_int(frclss)
    clssid = ctypes.c_int(clssid)
    lenout = ctypes.c_int(lenout)
    # Output buffers filled in by the CSPICE routine.
    frcode = ctypes.c_int()
    frname = stypes.stringToCharP(lenout)
    center = ctypes.c_int()
    found = ctypes.c_int()
    libspice.ccifrm_c(frclss, clssid, lenout, ctypes.byref(frcode), frname, ctypes.byref(center), ctypes.byref(found))
    # NOTE: four values are returned, including the boolean found flag.
    return frcode.value, stypes.toPythonString(frname), center.value, bool(found.value)
def _generate_move(cls, char, width=None, fill_char=None, bounce=False, reverse=True, back_char=None):
    """Yields strings that simulate movement of a character from left
    to right. For use with `BarSet.from_char`.

    Arguments:
        char      : Character to move across the progress bar.
        width     : Width for the progress bar.
                    Default: cls.default_width
        fill_char : String for empty space.
                    Default: cls.default_fill_char
        bounce    : Whether to move the character in both directions.
        reverse   : Whether to start on the right side.
        back_char : Character to use for the bounce's backward movement.
                    Default: `char`
    """
    width = width or cls.default_width
    char = str(char)
    # Padding that fills the remainder of the bar around the moving char.
    filler = str(fill_char or cls.default_fill_char) * (width - len(char))
    # Default sweep: forward left-to-right, backward right-to-left.
    rangeargs = RangeMoveArgs((0, width, 1), (width, 0, -1),)
    if reverse:  # Reverse the arguments for range to start from the right.
        # Not using swap, because the stopping point is different.
        rangeargs = RangeMoveArgs((width, -1, -1), (0, width - 1, 1),)
    # Forward sweep: the char is spliced into the filler at each position.
    yield from (''.join((filler[:i], char, filler[i:])) for i in range(*rangeargs.forward))
    if bounce:
        bouncechar = char if back_char is None else back_char
        yield from (''.join((filler[:i], str(bouncechar), filler[i:])) for i in range(*rangeargs.backward))
def load_version(filename='fuzzyhashlib/version.py'):
    """Parse a __version__ number from a source file"""
    with open(filename) as source:
        contents = source.read()
    # Match a quoted assignment at the very start of the file text.
    found = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents)
    if found is None:
        raise RuntimeError("Unable to find version number in {}".format(filename))
    return found.group(1)
def get_selected_state(self):
    """Returns the current selected state"""
    key = "{}_review_state".format(self.form_id)
    # Fall back to "default" when the form did not submit a state.
    return self.request.get(key, "default")
def _process_items(items, user_conf, error_protocol):
    """Parse metadata. Remove processed and successfully parsed items.

    Returns successfully processed items.
    """
    def process_meta(item, error_protocol):
        # Parse one item's metadata; on failure, record the problem in the
        # error protocol and salvage the raw ebook file when possible.
        try:
            return item._parse()
        # `except Exception, e` was Python-2-only syntax; `as` is valid on
        # Python 2.6+ and Python 3 alike.
        except Exception as e:
            # NOTE(review): e.message is Python-2-only (args[0]); kept to
            # preserve the exact logged text on the original interpreter.
            error_protocol.append("Can't parse %s: %s" % (item._get_filenames()[0], e.message))
            if isinstance(item, DataPair):
                return item.ebook_file

    # process all items and put them to output queue
    out = []
    for item in items:
        if isinstance(item, EbookFile):
            out.append(item)
        else:
            out.append(process_meta(item, error_protocol))
    # remove None items (process_meta() fails); a comprehension keeps `out`
    # a list on Python 3 as well, where filter() returns an iterator
    out = [x for x in out if x]
    # remove processed files
    fn_pool = []
    soon_removed = out if conf_merger(user_conf, "LEAVE_BAD_FILES") else items
    for item in soon_removed:
        fn_pool.extend(item._get_filenames())
    _remove_files(fn_pool)
    return out
def __load_omitted_parcov(self):
    """private: set the omitted_parcov attribute.

    Resolution order: if an explicit omitted_parcov argument was given it is
    (not yet) supported; otherwise, when only omitted parameter names were
    given, the omitted parcov is extracted from the base parcov, falling
    back to an identity matrix when the names are not all present there.
    """
    if self.omitted_parcov_arg is None and self.omitted_par_arg is None:
        raise Exception("ErrVar.__load_omitted_parcov: " + "both omitted args are None")
    # try to set omitted_parcov by extracting from base parcov
    if self.omitted_parcov_arg is None and self.omitted_par_arg is not None:  # check to see if omitted par names are in parcov
        found = True
        for par_name in self.omitted_jco.col_names:
            if par_name not in self.parcov.col_names:
                found = False
                break
        if found:  # need to access attribute directly, not view of attribute
            # The name-mangled base-class attribute is used deliberately to
            # bypass any property/view wrapping on `parcov`.
            self.__omitted_parcov = self._LinearAnalysis__parcov.extract(row_names=self.omitted_jco.col_names)
        else:
            self.logger.warn("ErrVar.__load_omitted_parun: " + "no omitted parcov arg passed: " + "setting omitted parcov as identity Matrix")
            self.__omitted_parcov = Cov(x=np.ones(self.omitted_jco.shape[1]), names=self.omitted_jco.col_names, isdiagonal=True)
    elif self.omitted_parcov_arg is not None:
        # Explicitly passing an omitted parcov is not supported yet.
        raise NotImplementedError()
def _update_solution_data(self, s):
    """Returns the voltage angle and generator set-point vectors.

    :param s: solver result mapping; only s["x"] (the stacked optimisation
        variable vector) is read here.
    :returns: tuple (Va, Vm, Pg, Qg) -- slices of x for voltage angles,
        voltage magnitudes, and generator active/reactive set-points.
    """
    x = s["x"]
    # Va_var = self.om.get_var("Va")
    # Vm_var = self.om.get_var("Vm")
    # Pg_var = self.om.get_var("Pg")
    # Qg_var = self.om.get_var("Qg")
    # Each variable block occupies x[i1 : iN + 1]; iN is inclusive.
    Va = x[self._Va.i1:self._Va.iN + 1]
    Vm = x[self._Vm.i1:self._Vm.iN + 1]
    Pg = x[self._Pg.i1:self._Pg.iN + 1]
    Qg = x[self._Qg.i1:self._Qg.iN + 1]
    # f = 0.5 * dot(x.T * HH, x) + dot(CC.T, x)
    # s["f"] = s["f"] + C0
    # Put the objective function value in the solution.
    # solution["f"] = f
    return Va, Vm, Pg, Qg
def _get_method(self, rdata):
    """Returns jsonrpc request's method value.

    InvalidRequestError will be raised if it's missing or is wrong type.
    MethodNotFoundError will be raised if a method with given method name
    does not exist.
    """
    # Guard clauses: the method field must exist and be a string.
    if 'method' not in rdata:
        raise InvalidRequestError
    method = rdata['method']
    if not isinstance(method, basestring):
        raise InvalidRequestError
    # The name must map to a registered method.
    if method not in self.method_data:
        raise MethodNotFoundError
    return method
def do_get_acls(self, params):
    """\x1b[1mNAME\x1b[0m
    get_acls - Gets ACLs for a given path

    \x1b[1mSYNOPSIS\x1b[0m
    get_acls <path> [depth] [ephemerals]

    \x1b[1mOPTIONS\x1b[0m
    * depth: -1 is no recursion, 0 is infinite recursion, N > 0 is up to N levels (default: 0)
    * ephemerals: include ephemerals (default: false)

    \x1b[1mEXAMPLES\x1b[0m
    > get_acls /zookeeper
    [ACL(perms=31, acl_list=['ALL'], id=Id(scheme=u'world', id=u'anyone'))]

    > get_acls /zookeeper -1
    /zookeeper: [ACL(perms=31, acl_list=['ALL'], id=Id(scheme=u'world', id=u'anyone'))]
    /zookeeper/config: [ACL(perms=31, acl_list=['ALL'], id=Id(scheme=u'world', id=u'anyone'))]
    /zookeeper/quota: [ACL(perms=31, acl_list=['ALL'], id=Id(scheme=u'world', id=u'anyone'))]
    """
    def substitute(acl_list, old_value, new_value):
        # Replace a well-known ACL object with its short readable alias;
        # when the ACL is absent, leave the list untouched.
        try:
            acl_list.remove(old_value)
        except ValueError:
            return
        acl_list.insert(0, new_value)

    for path, acls in self._zk.get_acls_recursive(params.path, params.depth, params.ephemerals):
        substitute(acls, READ_ACL_UNSAFE[0], "WORLD_READ")
        substitute(acls, OPEN_ACL_UNSAFE[0], "WORLD_ALL")
        self.show_output("%s: %s", path, acls)
def slugify_argument(func):
    """Wraps a function that returns a string, adding the 'slugify' argument.

    >>> slugified_fn = slugify_argument(lambda *args, **kwargs: "YOU ARE A NICE LADY")
    >>> slugified_fn()
    'YOU ARE A NICE LADY'
    >>> slugified_fn(slugify=True)
    'you-are-a-nice-lady'
    """
    # six.wraps is just an alias for functools.wraps; use the standard
    # library directly instead of the third-party shim.
    from functools import wraps

    @wraps(func)
    def wrapped(*args, **kwargs):
        # Note: the slugify keyword is intentionally passed through to the
        # wrapped function along with everything else (original behavior).
        if kwargs.get('slugify'):
            return _slugify(func(*args, **kwargs))
        return func(*args, **kwargs)
    return wrapped
def split_linear_constraints(A, l, u):
    """Returns the linear equality and inequality constraints.

    Each row i of A with bounds l[i] <= A[i,:] x <= u[i] is classified by
    its bounds and split into:
      Ae x  = be   (equalities, where l == u within EPS)
      Ai x <= bi   (inequalities, stacked as upper-, lower- and box-bounded)
    Rows with no finite bound at all raise ValueError.
    Bounds beyond +/-1e10 are treated as infinite.
    """
    ieq = []  # equality rows: u - l within EPS
    igt = []  # lower-bound-only rows: u infinite, l finite
    ilt = []  # upper-bound-only rows: l infinite, u finite
    ibx = []  # box rows: both bounds finite and distinct
    for i in range(len(l)):
        if abs(u[i] - l[i]) <= EPS:
            ieq.append(i)
        elif (u[i] > 1e10) and (l[i] > -1e10):
            igt.append(i)
        elif (l[i] <= -1e10) and (u[i] < 1e10):
            ilt.append(i)
        elif (abs(u[i] - l[i]) > EPS) and (u[i] < 1e10) and (l[i] > -1e10):
            ibx.append(i)
        else:
            # Both bounds infinite (or inconsistent): unconstrained row.
            raise ValueError
    # Lower bounds are negated so every inequality reads A x <= b.
    Ae = A[ieq, :]
    Ai = sparse([A[ilt, :], -A[igt, :], A[ibx, :], -A[ibx, :]])
    be = u[ieq, :]
    bi = matrix([u[ilt], -l[igt], u[ibx], -l[ibx]])
    return Ae, be, Ai, bi
def epochs(self):
    """Get epochs as generator.

    Returns
    -------
    generator of dict
        each epoch is defined by start_time and end_time (in s in reference
        to the start of the recordings), a string of the sleep stage, and a
        string of the signal quality.

    Raises
    ------
    IndexError
        When there is no rater/epochs at all
    """
    if self.rater is None:
        raise IndexError('You need to have at least one rater')
    for node in self.rater.iterfind('stages/epoch'):
        yield {
            'start': int(node.find('epoch_start').text),
            'end': int(node.find('epoch_end').text),
            'stage': node.find('stage').text,
            'quality': node.find('quality').text,
        }
def get_filter_rule(self, _filter, way='in'):
    """Return the filter rule.

    :param: _filter a zobjects.FilterRule or the filter name
    :param: way string describing if filter is for 'in' or 'out' messages
    :returns: a zobjects.FilterRule, or None when no rule matches
    """
    # Accept either a FilterRule object or a bare name.
    wanted = _filter.name if isinstance(_filter, zobjects.FilterRule) else _filter
    for rule in self.get_filter_rules(way=way):
        if rule.name == wanted:
            return rule
    return None
def write(self, f, grp, name, data, type_string, options):
    """Writes an object's metadata to file.

    Writes the Python object `data` to `name` in h5py.Group `grp`.

    .. versionchanged:: 0.2
       Arguments changed.

    Parameters
    ----------
    f : h5py.File
        The HDF5 file handle that is open.
    grp : h5py.Group or h5py.File
        The parent HDF5 Group (or File if at '/') that contains the
        object with the specified name.
    name : str
        Name of the object.
    data
        The object to write to file.
    type_string : str or None
        The type string for `data`. If it is ``None``, one will have
        to be gotten by ``get_type_string``.
    options : hdf5storage.core.Options
        hdf5storage options object.

    Raises
    ------
    NotImplementedError
        Always -- this base implementation must be overridden in a
        subclass.

    See Also
    --------
    hdf5storage.utilities.write_data
    """
    raise NotImplementedError("Can't write data type: " + str(type(data)))
def save(self, parameterstep=None, simulationstep=None):
    """Save all defined auxiliary control files.

    The target path is taken from the |ControlManager| object stored
    in module |pub|.  Hence we initialize one and override its
    |property| `currentpath` with a simple |str| object defining the
    test target path:

    >>> from hydpy import pub
    >>> pub.projectname = 'test'
    >>> from hydpy.core.filetools import ControlManager
    >>> class Test(ControlManager):
    ...     currentpath = 'test_directory'
    >>> pub.controlmanager = Test()

    Normally, the control files would be written to disk, of course.
    But to show (and test) the results in the following doctest,
    file writing is temporarily redirected via |Open|:

    >>> from hydpy import dummies
    >>> from hydpy import Open
    >>> with Open():
    ...     dummies.aux.save(
    ...         parameterstep='1d',
    ...         simulationstep='12h')
    test_directory/file1.py
    # -*- coding: utf-8 -*-
    <BLANKLINE>
    from hydpy.models.lland_v1 import *
    <BLANKLINE>
    simulationstep('12h')
    parameterstep('1d')
    <BLANKLINE>
    eqd1(200.0)
    <BLANKLINE>
    test_directory/file2.py
    # -*- coding: utf-8 -*-
    <BLANKLINE>
    from hydpy.models.lland_v2 import *
    <BLANKLINE>
    simulationstep('12h')
    parameterstep('1d')
    <BLANKLINE>
    eqd1(200.0)
    eqd2(100.0)
    <BLANKLINE>
    """
    par = parametertools.Parameter
    for (modelname, var2aux) in self:
        for filename in var2aux.filenames:
            # NOTE(review): the inner loop below rebinds `par` to the last
            # parameter instance, so from the second filename onward the
            # parameterstep/simulationstep context managers are looked up
            # on an instance rather than on the Parameter class.  This only
            # works because they are accessible through instances -- confirm
            # that the shadowing is intentional before renaming.
            with par.parameterstep(parameterstep), par.simulationstep(simulationstep):
                lines = [parametertools.get_controlfileheader(modelname, parameterstep, simulationstep)]
                for par in getattr(var2aux, filename):
                    lines.append(repr(par) + '\n')
                hydpy.pub.controlmanager.save_file(filename, ''.join(lines))
def request_examples(self, attack_config, criteria, run_counts, batch_size):
    """Returns a numpy array of integer example indices to run in the next batch.

    Abstract: subclasses must override this method.

    Raises:
        NotImplementedError: always, in this base implementation.
    """
    # Add the missing space so the message reads e.g.
    # "<class 'Foo'> needs to implement request_examples" instead of the
    # original run-together "<class 'Foo'>needs to implement ...".
    raise NotImplementedError(str(type(self)) + " needs to implement request_examples")
def _bar(self, target_name, current, total):
    """Make a progress bar out of info, which looks like:
    (1/2): [#####] 100% master.zip
    """
    BAR_LEN = 30
    if total is None:
        # Unknown size: show an empty bar with an unknown percentage.
        state = 0
        percent = "?% "
    else:
        total = int(total)
        state = 100 if current >= total else int((100 * current) / total)
        percent = str(state) + "% "
    # Prefix with the (finished/total) counter only for multi-target runs.
    if self._n_total > 1:
        num = "({}/{}): ".format(self._n_finished + 1, self._n_total)
    else:
        num = ""
    filled = int((state * BAR_LEN) / 100)
    pbar = "[" + "#" * filled + " " * (BAR_LEN - filled) + "] "
    return num + pbar + percent + target_name
def after_epoch(self, epoch_id: int, epoch_data: EpochData) -> None:
    """Call :py:meth:`_on_plateau_action` if the ``long_term``
    variable mean is lower/greater than the ``short_term`` mean."""
    super().after_epoch(epoch_id=epoch_id, epoch_data=epoch_data)
    latest = epoch_data[self._stream][self._variable][OnPlateau._AGGREGATION]
    self._saved_loss.append(latest)
    long_mean = np.mean(self._saved_loss[-self._long_term:])
    short_mean = np.mean(self._saved_loss[-self._short_term:])
    plateaued = (
        (self._objective == 'min' and long_mean < short_mean)
        or (self._objective == 'max' and long_mean > short_mean)
    )
    if plateaued:
        self._on_plateau_action(epoch_id=epoch_id, epoch_data=epoch_data)
def detachPanel(self):
    """Detaches the current panel as a floating window."""
    from projexui.widgets.xviewwidget import XViewDialog

    panel = self._currentPanel
    dialog = XViewDialog(self._viewWidget, self._viewWidget.viewTypes())
    # Remember the panel size before its contents are moved away.
    original_size = panel.size()
    dialog.viewWidget().currentPanel().snagViewFromPanel(panel)
    dialog.resize(original_size)
    dialog.show()
def present(name,
            running=None,
            clone_from=None,
            snapshot=False,
            profile=None,
            network_profile=None,
            template=None,
            options=None,
            image=None,
            config=None,
            fstype=None,
            size=None,
            backing=None,
            vgname=None,
            lvname=None,
            thinpool=None,
            path=None):
    '''
    .. versionchanged:: 2015.8.0
        The :mod:`lxc.created <salt.states.lxc.created>` state has been
        renamed to ``lxc.present``, and the :mod:`lxc.cloned
        <salt.states.lxc.cloned>` state has been merged into this state.

    Create the named container if it does not exist

    name
        The name of the container to be created

    path
        path to the container parent
        default: /var/lib/lxc (system default)

        .. versionadded:: 2015.8.0

    running : False
        * If ``True``, ensure that the container is running
        * If ``False``, ensure that the container is stopped
        * If ``None``, do nothing with regards to the running state of the
          container

        .. versionadded:: 2015.8.0

    clone_from
        Create named container as a clone of the specified container

    snapshot : False
        Use Copy On Write snapshots (LVM). Only supported with ``clone_from``.

    profile
        Profile to use in container creation (see the :ref:`LXC Tutorial
        <tutorial-lxc-profiles-container>` for more information). Values in a
        profile will be overridden by the parameters listed below.

    network_profile
        Network Profile to use in container creation
        (see the :ref:`LXC Tutorial <tutorial-lxc-profiles-container>`
        for more information). Values in a profile will be overridden by
        the parameters listed below.

        .. versionadded:: 2015.5.2

    **Container Creation Arguments**

    template
        The template to use. For example, ``ubuntu`` or ``fedora``.
        For a full list of available templates, check out
        the :mod:`lxc.templates <salt.modules.lxc.templates>` function.

        Conflicts with the ``image`` argument.

        .. note::

            The ``download`` template requires the following three parameters
            to be defined in ``options``:

            * **dist** - The name of the distribution
            * **release** - Release name/version
            * **arch** - Architecture of the container

            The available images can be listed using the :mod:`lxc.images
            <salt.modules.lxc.images>` function.

    options
        .. versionadded:: 2015.5.0

        Template-specific options to pass to the lxc-create command. These
        correspond to the long options (ones beginning with two dashes) that
        the template script accepts. For example:

        .. code-block:: yaml

            web01:
              lxc.present:
                - template: download
                - options:
                    dist: centos
                    release: 6
                    arch: amd64

        Remember to double-indent the options, due to :ref:`how PyYAML works
        <nested-dict-indentation>`.

        For available template options, refer to the lxc template scripts
        which are usually located under ``/usr/share/lxc/templates``,
        or run ``lxc-create -t <template> -h``.

    image
        A tar archive to use as the rootfs for the container. Conflicts with
        the ``template`` argument.

    backing
        The type of storage to use. Set to ``lvm`` to use an LVM group.
        Defaults to filesystem within /var/lib/lxc.

    fstype
        Filesystem type to use on LVM logical volume

    size
        Size of the volume to create. Only applicable if ``backing`` is set
        to ``lvm``.

    vgname : lxc
        Name of the LVM volume group in which to create the volume for this
        container. Only applicable if ``backing`` is set to ``lvm``.

    lvname
        Name of the LVM logical volume in which to create the volume for this
        container. Only applicable if ``backing`` is set to ``lvm``.

    thinpool
        Name of a pool volume that will be used for thin-provisioning this
        container. Only applicable if ``backing`` is set to ``lvm``.
    '''
    # Standard salt state return structure; 'comment' starts from the
    # "nothing to do" case and is refined as work is performed.
    ret = {'name': name,
           'result': True,
           'comment': 'Container \'{0}\' already exists'.format(name),
           'changes': {}}
    if not any((template, image, clone_from)):
        # Take a peek into the profile to see if there is a clone source there.
        # Otherwise, we're assuming this is a template/image creation. Also
        # check to see if none of the create types are in the profile. If this
        # is the case, then bail out early.
        c_profile = __salt__['lxc.get_container_profile'](profile)
        if not any(x for x in c_profile if x in ('template', 'image', 'clone_from')):
            ret['result'] = False
            ret['comment'] = ('No template, image, or clone_from parameter '
                              'was found in either the state\'s arguments or '
                              'the LXC profile')
        else:
            try:
                # Assign the profile's clone_from param to the state, so that
                # we know to invoke lxc.clone to create the container.
                clone_from = c_profile['clone_from']
            except KeyError:
                pass
    # Sanity check(s): a clone source must actually exist before we proceed.
    if clone_from and not __salt__['lxc.exists'](clone_from, path=path):
        ret['result'] = False
        ret['comment'] = ('Clone source \'{0}\' does not exist'.format(clone_from))
    if not ret['result']:
        return ret
    # NOTE(review): 'action' is computed here but never used below -- the
    # comment strings are rebuilt inline instead. Looks like a leftover;
    # verify before removing.
    action = 'cloned from {0}'.format(clone_from) if clone_from else 'created'
    # Record the container's pre-state so changes can be reported at the end.
    state = {'old': __salt__['lxc.state'](name, path=path)}
    if __opts__['test']:
        # Dry-run mode: report what *would* happen and return without
        # touching the container.
        if state['old'] is None:
            ret['comment'] = ('Container \'{0}\' will be {1}'.format(
                name,
                'cloned from {0}'.format(clone_from) if clone_from else 'created'))
            ret['result'] = None
            return ret
        else:
            if running is None:
                # Container exists and we're not managing whether or not it's
                # running. Set the result back to True and return
                return ret
            elif running:
                if state['old'] in ('frozen', 'stopped'):
                    ret['comment'] = ('Container \'{0}\' would be {1}'.format(
                        name,
                        'unfrozen' if state['old'] == 'frozen' else 'started'))
                    ret['result'] = None
                    return ret
                else:
                    ret['comment'] += ' and is running'
                    return ret
            else:
                if state['old'] in ('frozen', 'running'):
                    ret['comment'] = ('Container \'{0}\' would be stopped'.format(name))
                    ret['result'] = None
                    return ret
                else:
                    ret['comment'] += ' and is stopped'
                    return ret
    if state['old'] is None:
        # Container does not exist -- create it, either by cloning or from a
        # template/image.
        try:
            if clone_from:
                result = __salt__['lxc.clone'](name,
                                               clone_from,
                                               profile=profile,
                                               network_profile=network_profile,
                                               snapshot=snapshot,
                                               size=size,
                                               path=path,
                                               backing=backing)
            else:
                result = __salt__['lxc.create'](name,
                                                profile=profile,
                                                network_profile=network_profile,
                                                template=template,
                                                options=options,
                                                image=image,
                                                config=config,
                                                fstype=fstype,
                                                size=size,
                                                backing=backing,
                                                vgname=vgname,
                                                path=path,
                                                lvname=lvname,
                                                thinpool=thinpool)
        except (CommandExecutionError, SaltInvocationError) as exc:
            ret['result'] = False
            ret['comment'] = exc.strerror
        else:
            if clone_from:
                ret['comment'] = ('Cloned container \'{0}\' as \'{1}\''.format(clone_from, name))
            else:
                ret['comment'] = 'Created container \'{0}\''.format(name)
            state['new'] = result['state']['new']
    if ret['result'] is True:
        # Enforce the "running" parameter
        if running is None:
            # Don't do anything
            pass
        elif running:
            c_state = __salt__['lxc.state'](name, path=path)
            if c_state == 'running':
                ret['comment'] += ' and is running'
            else:
                error = ', but it could not be started'
                try:
                    # A frozen container must be unfrozen rather than started.
                    start_func = 'lxc.unfreeze' if c_state == 'frozen' else 'lxc.start'
                    state['new'] = __salt__[start_func](name, path=path)['state']['new']
                    if state['new'] != 'running':
                        ret['result'] = False
                        ret['comment'] += error
                except (SaltInvocationError, CommandExecutionError) as exc:
                    ret['result'] = False
                    ret['comment'] += '{0}: {1}'.format(error, exc)
                else:
                    if state['old'] is None:
                        ret['comment'] += ', and the container was started'
                    else:
                        ret['comment'] = ('Container \'{0}\' was {1}'.format(
                            name,
                            'unfrozen' if state['old'] == 'frozen' else 'started'))
        else:
            c_state = __salt__['lxc.state'](name, path=path)
            if c_state == 'stopped':
                if state['old'] is not None:
                    ret['comment'] += ' and is stopped'
            else:
                error = ', but it could not be stopped'
                try:
                    state['new'] = __salt__['lxc.stop'](name, path=path)['state']['new']
                    if state['new'] != 'stopped':
                        ret['result'] = False
                        ret['comment'] += error
                except (SaltInvocationError, CommandExecutionError) as exc:
                    ret['result'] = False
                    ret['comment'] += '{0}: {1}'.format(error, exc)
                else:
                    if state['old'] is None:
                        ret['comment'] += ', and the container was stopped'
                    else:
                        ret['comment'] = ('Container \'{0}\' was stopped'.format(name))
    if 'new' not in state:
        # Make sure we know the final state of the container before we return
        state['new'] = __salt__['lxc.state'](name, path=path)
    if state['old'] != state['new']:
        ret['changes']['state'] = state
    return ret
def parse_combo(self, combo, modes_set, modifiers_set, pfx):
    """Parse a string into a mode, a set of modifiers and a trigger."""
    # Defaults: no mode, no modifiers, and the raw combo string as trigger.
    mode, mods, trigger = None, set([]), combo
    if '+' in combo:
        if combo.endswith('+'):
            # special case: probably contains the keystroke '+'
            trigger, combo = '+', combo[:-1]
        if '+' in combo:
            items = set(combo.split('+'))
        else:
            # NOTE(review): set(combo) builds a set of single CHARACTERS;
            # for a multi-character remainder (e.g. 'shift') this looks
            # wrong -- set([combo]) may have been intended. Verify against
            # callers before changing.
            items = set(combo)
    else:
        # trigger is always specified last
        items = combo.split('+')
        trigger, items = items[-1], set(items[:-1])
    # NOTE(review): in the '+'-containing branch above, 'trigger' keeps the
    # FULL combo string unless the combo ended with '+'; confirm this is the
    # intended behaviour (it looks like the trigger extraction from the else
    # branch may have been meant to apply there as well).
    if '*' in items:
        items.remove('*')
        # modifier wildcard: match any combination of modifiers
        mods = '*'
    else:
        mods = items.intersection(modifiers_set)
    # At most one mode is expected; pop() picks it out of the intersection.
    mode = items.intersection(modes_set)
    if len(mode) == 0:
        mode = None
    else:
        mode = mode.pop()
    if pfx is not None:
        # optional prefix prepended to the trigger key name
        trigger = pfx + trigger
    return (mode, mods, trigger)
def fix_nonref_positions(in_file, ref_file):
    """Fix Genotyping VCF positions where the bases are all variants.

    The plink/pseq output does not handle these correctly, and
    has all reference/variant bases reversed.

    :param in_file: path to the raw input VCF (named ``*-raw.vcf``)
    :param ref_file: path to the reference genome in 2bit format
    :returns: path to the corrected output VCF
    """
    ignore_chrs = ["."]
    ref2bit = twobit.TwoBitFile(open(ref_file))
    out_file = in_file.replace("-raw.vcf", ".vcf")
    with open(in_file) as in_handle:
        with open(out_file, "w") as out_handle:
            for line in in_handle:
                if line.startswith("#"):
                    # header lines pass through unchanged
                    out_handle.write(line)
                else:
                    parts = line.rstrip("\r\n").split("\t")
                    pos = int(parts[1])
                    # handle chr/non-chr naming
                    if parts[0] not in ref2bit.keys() and parts[0].replace("chr", "") in ref2bit.keys():
                        parts[0] = parts[0].replace("chr", "")
                    # handle X chromosome (plink encodes it as "23")
                    elif parts[0] not in ref2bit.keys() and parts[0] == "23":
                        for test in ["X", "chrX"]:
                            if test in ref2bit.keys():
                                # Bug fix: this previously read ``parts[0] == test``
                                # (a no-op comparison) so the chromosome name was
                                # never rewritten; assign and stop at first match.
                                parts[0] = test
                                break
                    ref_base = None
                    if parts[0] not in ignore_chrs:
                        try:
                            # 2bit lookup is 0-based half-open; VCF pos is 1-based
                            ref_base = ref2bit[parts[0]].get(pos - 1, pos).upper()
                        except Exception as msg:
                            print("Skipping line. Failed to retrieve reference base for %s\n%s" % (str(parts), msg))
                    parts = fix_vcf_line(parts, ref_base)
                    if parts is not None:
                        out_handle.write("\t".join(parts) + "\n")
    return out_file
def bbox_vert_aligned_center(box1, box2):
    """Returns true if the center of both boxes is within 5 pts"""
    if not (box1 and box2):
        # missing box(es): cannot be aligned
        return False
    # horizontal midpoint of each bounding box
    center1 = (box1.right + box1.left) / 2.0
    center2 = (box2.right + box2.left) / 2.0
    return abs(center1 - center2) <= 5
def context(self, context):
    """Sets the context that Selenium commands are running in using
    a `with` statement. The state of the context on the server is
    saved before entering the block, and restored upon exiting it.

    :param context: Context, may be one of the class properties
        `CONTEXT_CHROME` or `CONTEXT_CONTENT`.

    Usage example::

        with selenium.context(selenium.CONTEXT_CHROME):
            # chrome scope
            ... do stuff ...
    """
    # Remember whatever context is currently active on the server.
    saved_context = self.execute('GET_CONTEXT').pop('value')
    self.set_context(context)
    try:
        yield
    finally:
        # Always restore the previous context, even if the block raised.
        self.set_context(saved_context)
def register(self, type_, handler):
    """Register a handler to listen for events of the given type.

    ``self.__handlers`` is a defaultdict, so looking up an unseen event
    type transparently creates a fresh, empty handler list for it.
    """
    listeners = self.__handlers[type_]
    # Skip duplicates so a handler only fires once per event.
    if handler not in listeners:
        listeners.append(handler)
def _getter(self):
    """Return a function object suitable for the "get" side of the property
    descriptor. This default getter returns the child element with
    matching tag name or |None| if not present.
    """
    doc = '``<%s>`` child element or |None| if not present.' % self._nsptagname

    def get_child_element(obj):
        return obj.find(qn(self._nsptagname))

    # Attach a docstring so the generated property is self-describing.
    get_child_element.__doc__ = doc
    return get_child_element
def verify(cls, user_id, verification_hash):
    """Verify a user using the verification hash.

    The verification hash is removed from the user once verified.

    :param user_id: the user ID
    :param verification_hash: the verification hash
    :returns: a User instance
    """
    account = yield cls.get(user_id)
    # No stored hash means the user has already been verified -- done.
    if 'verification_hash' not in account._resource:
        raise Return(account)
    if account.verification_hash != verification_hash:
        raise exceptions.ValidationError('Invalid verification hash')
    # One-shot token: drop the hash and persist before returning.
    del account.verification_hash
    yield account._save()
    raise Return(account)
def ceafe(clusters, gold_clusters):
    """Computes the Constrained EntityAlignment F-Measure (CEAF) for evaluating
    coreference. Gold and predicted mentions are aligned into clusterings which
    maximise a metric - in this case, the F measure between gold and predicted
    clusters.

    <https://www.semanticscholar.org/paper/On-Coreference-Resolution-Performance-Metrics-Luo/de133c1f22d0dfe12539e25dda70f28672459b99>
    """
    # Singleton predicted clusters are excluded from the alignment.
    predicted = [cluster for cluster in clusters if len(cluster) != 1]
    # Pairwise phi4 similarity between every gold / predicted cluster pair.
    similarity_matrix = np.zeros((len(gold_clusters), len(predicted)))
    for row, gold_cluster in enumerate(gold_clusters):
        for col, predicted_cluster in enumerate(predicted):
            similarity_matrix[row, col] = Scorer.phi4(gold_cluster, predicted_cluster)
    # Negate so the assignment solver (a minimiser) maximises similarity.
    matching = linear_assignment(-similarity_matrix)
    similarity = sum(similarity_matrix[matching[:, 0], matching[:, 1]])
    return similarity, len(predicted), similarity, len(gold_clusters)
def reduced_matrix_element(fine_statei, fine_statej, convention=1):
    r"""Return the reduced matrix element of the position operator in Bohr
    radii.

    We have two available conventions for this

    1.- [Racah]_ and [Edmonds74]_

    .. math::
        \langle \gamma_i, J_i, M_i|\hat{T}^k_q|\gamma_j, J_j, M_j\rangle
        = (-1)^{J_i-M_i}
        \left(\begin{matrix}J_i & k & J_j\\ -M_i & q & M_j\end{matrix}\right)
        \langle \gamma_i, J_i||\hat{T}^k||\gamma_j, J_j\rangle_\mathrm{Racah}

    2.- [Brink_Satchler]_

    .. math::
        \langle \gamma_i, J_i, M_i|\hat{T}^k_q|\gamma_j, J_j, M_j\rangle
        = (-1)^{J_i-M_i}\sqrt{2J_i+1}
        \left(\begin{matrix}J_i & k & J_j\\ -M_i & q & M_j\end{matrix}\right)
        \langle \gamma_i, J_i||\hat{T}^k||\gamma_j, J_j\rangle_\mathrm{Brink}

    These two definitions of the reduced matrix element are related by

    .. math::
        \langle \gamma_i, J_i||\hat{T}^k||\gamma_j, J_j\rangle_\mathrm{Racah}
        = \sqrt{2J_i+1}\,
        \langle \gamma_i, J_i||\hat{T}^k||\gamma_j, J_j\rangle_\mathrm{Brink}

    With the Racah element being symmetric under argument exchange apart from
    a sign:

    .. math::
        \langle \gamma_j, J_j||(\hat{T}^k)^\dagger||\gamma_i, J_i
        \rangle_\mathrm{Racah} = (-1)^{J_j-J_i}
        \langle \gamma_i, J_i||\hat{T}^k||\gamma_j, J_j\rangle_\mathrm{Racah}

    And the Brink element being asymmetric under argument exchange:

    .. math::
        \langle \gamma_j, J_j||\hat{T}^k||\gamma_i, J_i\rangle_\mathrm{Brink}
        = (-1)^{J_j-J_i}\frac{\sqrt{2J_i+1}}{\sqrt{2J_j+1}}
        \langle \gamma_i, J_i||\hat{T}^k||\gamma_j, J_j\rangle_\mathrm{Brink}

    References:

    .. [Brink_Satchler] Brink, D. M. and G. R. Satchler: 1994. "Angular
        Momentum". Oxford: Oxford University Press, 3rd edn., 182 pages.
    .. [Racah] Racah, G.: 1942. "Theory of complex spectra II". Phys. Rev.,
        62 438-462.
    .. [Edmonds74] A. R. Edmonds. Angular momentum in quantum mechanics.
        Investigations in physics, 4.; Investigations in physics, no. 4.
        Princeton, N.J., Princeton University Press, 1957.

    >>> g = State("Rb", 87, 5, 0, 1/Integer(2))
    >>> e = State("Rb", 87, 5, 1, 3/Integer(2))
    >>> print(reduced_matrix_element(g, e))
    5.97785756147
    >>> print(reduced_matrix_element(e, g))
    -5.97785756146761
    >>> print(reduced_matrix_element(g, e, convention=2))
    4.22698361868
    >>> print(reduced_matrix_element(e, g, convention=2))
    -2.11349180934051
    """
    if fine_statei == fine_statej:
        # The position operator has no diagonal reduced matrix element here.
        return 0.0
    t = Transition(fine_statei, fine_statej)
    einsteinAij = t.einsteinA
    omega0 = t.omega
    Ji = fine_statei.j
    Jj = fine_statej.j
    factor = sqrt(3 * Pi * hbar * c ** 3 * epsilon0) / e
    if omega0 < 0:
        # Compute the element directly from the Einstein A coefficient.
        # NOTE(review): presumably omega0 < 0 identifies fine_statei as the
        # lower-energy state -- confirm the sign conventions of t.omega and
        # t.einsteinA.
        rij = factor * sqrt((2 * Jj + 1) * einsteinAij / omega0 ** 3) / a0
    else:
        # Reversed pair: use the symmetry relation on the exchanged element.
        rij = reduced_matrix_element(fine_statej, fine_statei, convention=convention)
        rij *= (-1) ** (Jj - Ji)
    if convention == 2:
        # Brink convention differs from Racah by a factor 1/sqrt(2*Ji+1).
        # (The original code had an if/else on omega0 here whose two branches
        # were byte-identical; collapsed into a single statement.)
        rij = rij / sqrt(2 * Ji + 1)
    return rij
def IterAssociatorInstances(self, InstanceName, AssocClass=None, ResultClass=None, Role=None, ResultRole=None, IncludeQualifiers=None, IncludeClassOrigin=None, PropertyList=None, FilterQueryLanguage=None, FilterQuery=None, OperationTimeout=None, ContinueOnError=None, MaxObjectCount=DEFAULT_ITER_MAXOBJECTCOUNT, **extra):
    # pylint: disable=invalid-name,line-too-long
    """Retrieve the instances associated to a source instance, using the
    Python :term:`py:generator` idiom to return the result.

    *New in pywbem 0.10 as experimental and finalized in 0.12.*

    This method first attempts the corresponding pull operations
    (:meth:`~pywbem.WBEMConnection.OpenAssociatorInstances` and
    :meth:`~pywbem.WBEMConnection.PullInstancesWithPath`) and, if the WBEM
    server does not support them, falls back to the traditional
    :meth:`~pywbem.WBEMConnection.Associators` operation. Whether the server
    supports the pull operations is remembered on the connection (per
    operation type), so the probe happens at most once. Instances are
    retrieved in chunks of up to `MaxObjectCount` when pulling (or the whole
    result set at once on fallback) and yielded one by one.

    Some functionality is only available when the pull operations are used:

    * Filtering (`FilterQuery`, `FilterQueryLanguage`) -- if the traditional
      operation must be used, :exc:`~py:exceptions.ValueError` is raised.
    * ``ContinueOnError=True`` -- likewise rejected on fallback.

    The enumeration session opened on the server is closed automatically
    when the returned generator object is exhausted, or when it is closed
    early via its :meth:`~py:generator.close` method.

    Parameters:

      InstanceName (:class:`~pywbem.CIMInstanceName`):
        Instance path of the source instance. A missing namespace defaults
        to the connection's default namespace; its `host` is ignored.

      AssocClass, ResultClass (:term:`string` or :class:`~pywbem.CIMClassName`):
        Optional association class / associated class name filters
        (case independent); `None` disables the filter.

      Role, ResultRole (:term:`string`):
        Optional source-end / far-end role (property name) filters;
        `None` disables the filter.

      IncludeQualifiers, IncludeClassOrigin (:class:`py:bool`):
        Server-side inclusion flags; `None` uses the server default
        (`False` per :term:`DSP0200`). Both are deprecated in
        :term:`DSP0200` and may not be honored by servers.

      PropertyList (:term:`string` or :term:`py:iterable` of :term:`string`):
        Property names to include in the returned instances; an empty
        iterable means no properties, `None` means all.

      FilterQueryLanguage, FilterQuery (:term:`string`):
        Filter query and its language (e.g. "DMTF:FQL", see :term:`DSP0212`).
        Pull operations only.

      OperationTimeout (:class:`~pywbem.Uint32`):
        Proposed enumeration-session timeout in seconds (0 = never time
        out); `None` uses the server default. Servers may reject the
        proposed value with :class:`~pywbem.CIMError`
        (:attr:`~pywbem.CIM_ERR_INVALID_OPERATION_TIMEOUT`).

      ContinueOnError (:class:`py:bool`):
        Whether the server should continue sending responses after an error
        response; `None` uses the server default (`False`). Pull operations
        only; servers without support raise :class:`~pywbem.CIMError`
        (:attr:`~pywbem.CIM_ERR_CONTINUATION_ON_ERROR_NOT_SUPPORTED`).

      MaxObjectCount (:class:`~pywbem.Uint32`):
        Maximum number of instances the server may return for each open or
        pull request. Must be a positive integer; zero and `None` are not
        allowed.

      **extra:
        Additional operation parameters passed to the WBEM server
        (:term:`DSP0200` defines none for this operation).

    Returns:

      :term:`py:generator` iterating :class:`~pywbem.CIMInstance`:
      A generator object that iterates the resulting CIM instances. These
      instances include an instance path that has its host and namespace
      components set.

    Raises:

      Exceptions described in :class:`~pywbem.WBEMConnection`.

    Example::

        insts_generator = conn.IterAssociatorInstances('CIM_Blah.key=1', ...)
        for inst in insts_generator:
            # close if a particular property value found
            if inst.get('thisPropertyName') == 0
                insts_generator.close()
                break
            else:
                print('instance {0}'.format(inst.tomof()))
    """  # noqa: E501
    # Must be positive integer gt zero
    _validateIterCommonParams(MaxObjectCount, OperationTimeout)
    # Common variable for pull result tuple used by pulls and finally:
    pull_result = None
    try:  # try/finally block to allow iter.close()
        # Pull path: taken when support is unknown (None) or known-good.
        if (self._use_assoc_inst_pull_operations is None or self._use_assoc_inst_pull_operations):
            try:  # operation try block
                pull_result = self.OpenAssociatorInstances(InstanceName, AssocClass=AssocClass, ResultClass=ResultClass, Role=Role, ResultRole=ResultRole, IncludeQualifiers=IncludeQualifiers, IncludeClassOrigin=IncludeClassOrigin, PropertyList=PropertyList, FilterQueryLanguage=FilterQueryLanguage, FilterQuery=FilterQuery, OperationTimeout=OperationTimeout, ContinueOnError=ContinueOnError, MaxObjectCount=MaxObjectCount, **extra)
                # Open operation succeeded; set has_pull flag
                self._use_assoc_inst_pull_operations = True
                for inst in pull_result.instances:
                    yield inst
                # Loop to pull while more while eos not returned.
                while not pull_result.eos:
                    pull_result = self.PullInstancesWithPath(pull_result.context, MaxObjectCount=MaxObjectCount)
                    for inst in pull_result.instances:
                        yield inst
                pull_result = None  # clear the pull_result
                return
            # If NOT_SUPPORTED and first request, set flag and try
            # alternative request operation.
            # If _use_assoc_inst_pull_operations is True, always raise
            # the exception
            except CIMError as ce:
                if (self._use_assoc_inst_pull_operations is None and ce.status_code == CIM_ERR_NOT_SUPPORTED):
                    self._use_assoc_inst_pull_operations = False
                else:
                    raise
        # Alternate request if Pull not implemented. This does not allow
        # the FilterQuery or ContinueOnError
        assert self._use_assoc_inst_pull_operations is False
        if FilterQuery is not None or FilterQueryLanguage is not None:
            raise ValueError('Associators does not support' ' FilterQuery.')
        if ContinueOnError is not None:
            raise ValueError('Associators does not support ' 'ContinueOnError.')
        enum_rslt = self.Associators(InstanceName, AssocClass=AssocClass, ResultClass=ResultClass, Role=Role, ResultRole=ResultRole, IncludeQualifiers=IncludeQualifiers, IncludeClassOrigin=IncludeClassOrigin, PropertyList=PropertyList, **extra)
        for inst in enum_rslt:
            yield inst
    # Cleanup if caller closes the iterator before exhausting it
    finally:
        # Cleanup only required if the pull context is open and not complete
        if pull_result is not None and not pull_result.eos:
            self.CloseEnumeration(pull_result.context)
            pull_result = None
def verify_cot_signatures(chain):
    """Verify the signatures of the chain of trust artifacts populated in ``download_cot``.

    Populate each link.cot with the chain of trust json body.

    Args:
        chain (ChainOfTrust): the chain of trust to add to.

    Raises:
        CoTError: on failure.
    """
    for link in chain.links:
        # Each link ships the CoT document alongside its detached signature.
        cot_path = link.get_artifact_full_path('public/chain-of-trust.json')
        sig_path = link.get_artifact_full_path('public/chain-of-trust.json.sig')
        verify_link_ed25519_cot_signature(chain, link, cot_path, sig_path)
def savestate(self, state, chain=-1):
    """Store a dictionary containing the state of the Model and its
    StepMethods."""
    target = self._chains[chain]
    if hasattr(target, '_state_'):
        # A state array already exists for this chain; overwrite in place.
        target._state_[0] = state
    else:
        # First save: create the variable-length array holding the state.
        state_array = self._h5file.create_vlarray(
            target, '_state_', tables.ObjectAtom(),
            title='The saved state of the sampler', filters=self.filter)
        state_array.append(state)
    self._h5file.flush()
def clean_add_strdec(*args, prec=28):
    """Add numbers that are given as strings, using exact decimal arithmetic.

    Each string value is first cleaned of every character that is not a
    digit, '.' or '-' (currency symbols, thousands separators, ...) and the
    remainder is summed with :class:`decimal.Decimal` at the given precision.

    :param args: either scalar values (summed into one result string), or a
        single list/tuple/ndarray/DataFrame whose rows are summed row-wise
    :param prec: keyword-only decimal context precision (default 28)
    :returns: the sum(s) as strings, mirroring the input's container type
    """
    # load modules
    import pandas as pd
    import numpy as np
    import re
    from decimal import Decimal, getcontext
    getcontext().prec = prec

    def proc_elem(*args):
        # accumulate one row of values into a single Decimal
        t = Decimal('0.0')
        for a in args:
            if isinstance(a, str):
                # Keep only digits, decimal point and minus sign.
                # Bug fix: the pattern is now a raw string; the original
                # non-raw '[^0-9\.\-]+' relied on invalid escape sequences,
                # which newer Python versions warn about.
                a = re.sub(r'[^0-9\.\-]+', '', a)
            if a and pd.notnull(a):
                t += Decimal(a)
        return str(t)

    def proc_list(arr):
        return [proc_elem(*row) for row in arr]

    def proc_ndarray(arr):
        return np.array(proc_list(arr))

    # transform depending on provided datatypes
    if isinstance(args[0], (list, tuple)):
        return proc_list(args[0])
    elif isinstance(args[0], np.ndarray):
        return proc_ndarray(args[0])
    elif isinstance(args[0], pd.DataFrame):
        return pd.DataFrame(proc_ndarray(args[0].values), index=args[0].index)
    else:
        return proc_elem(*args)
def sendcmd(self, cmd='AT', timeout=1.0):
    """send command, wait for response. returns response from modem."""
    import time
    if not self.write(cmd):
        # Nothing was written; mirror the original's implicit None return.
        return None
    # Poll in 0.1s steps until the modem answers or the budget runs out.
    remaining = timeout
    while self.get_response() == '' and remaining > 0:
        time.sleep(0.1)
        remaining -= 0.1
    return self.get_lines()
def get_term_category_frequencies(self, scatterchartdata):
    '''
    Parameters
    ----------
    scatterchartdata : ScatterChartData

    Returns
    -------
    pd.DataFrame
    '''
    # Suffix every category column with ' freq' on a renamed copy.
    renamed = self.term_category_freq_df.rename(
        columns={col: col + ' freq' for col in self.term_category_freq_df}
    )
    renamed.index.name = 'term'
    return renamed
def append(self, state, symbol, action, destinationstate, production=None):
    """Append a new rule to the parser table.

    state: source state index
    symbol: grammar symbol triggering the transition
    action: one of None, "Accept", "Shift", "Reduce"
    destinationstate: target state index
    production: required when action is "Reduce"

    Raises TypeError for an unknown action, a missing production on a
    Reduce, or a non-Symbol symbol.
    """
    if action not in (None, "Accept", "Shift", "Reduce"):
        raise TypeError
    rule = {"action": action, "dest": destinationstate}
    if action == "Reduce":
        # BUG FIX: the original tested ``rule is None``, which can never
        # be true (rule was just assigned above); the intent is clearly
        # to require the production argument for Reduce actions.
        if production is None:
            raise TypeError("Expected production parameter")
        rule["rule"] = production
    # Reduces symbol if its gd is a Sequence/Choice of 1 element:
    # unwrap terminal symbols whose grammar definition is a one-element
    # iterable containing a Grammar.
    while isinstance(symbol, TerminalSymbol) and isinstance(symbol.gd, Iterable) and len(symbol.gd) == 1 and isinstance(list(symbol.gd)[0], Grammar):
        symbol = TerminalSymbol(list(symbol.gd)[0])
    if not isinstance(symbol, Symbol):
        raise TypeError("Expected symbol, got %s" % symbol)
    self[state][symbol] = rule
def create_random(cls, length, **kwargs):
    """Build an instance of *cls* with random DNA of ``length`` characters.

    Characters are drawn one by one, uniformly with replacement, from
    ``cls.GENETIC_MATERIAL_OPTIONS``; extra keyword arguments are
    forwarded to the ``cls`` constructor.
    """
    alphabet = cls.GENETIC_MATERIAL_OPTIONS
    dna = ''.join(random.choice(alphabet) for _ in range(length))
    return cls(dna, **kwargs)
def _dispatch_command(reactor, cfg, command):
    """Internal helper. This calls the given command (a no-argument
    callable) with the Config instance in cfg and interprets any
    errors for the user.

    Runs as a Twisted inlineCallbacks-style generator: ``yield`` waits on
    the Deferred returned by ``command``.  Known error types become short
    messages on cfg.stderr followed by SystemExit(1); anything else gets
    a full traceback.
    """
    cfg.timing.add("command dispatch")
    # ``start`` and ``top_import_finish`` are module-level timestamps
    # recorded at import time elsewhere in this file.
    cfg.timing.add("import", when=start, which="top").finish(when=top_import_finish)
    try:
        yield maybeDeferred(command)
    except (WrongPasswordError, NoTorError) as e:
        # These error classes carry a user-facing explanation in their
        # docstring, which is reflowed for the terminal.
        msg = fill("ERROR: " + dedent(e.__doc__))
        print(msg, file=cfg.stderr)
        raise SystemExit(1)
    except (WelcomeError, UnsendableFileError, KeyFormatError) as e:
        msg = fill("ERROR: " + dedent(e.__doc__))
        print(msg, file=cfg.stderr)
        print(six.u(""), file=cfg.stderr)
        print(six.text_type(e), file=cfg.stderr)
        raise SystemExit(1)
    except TransferError as e:
        print(u"TransferError: %s" % six.text_type(e), file=cfg.stderr)
        raise SystemExit(1)
    except ServerConnectionError as e:
        msg = fill("ERROR: " + dedent(e.__doc__)) + "\n"
        msg += "(relay URL was %s)\n" % e.url
        msg += six.text_type(e)
        print(msg, file=cfg.stderr)
        raise SystemExit(1)
    except Exception as e:
        # this prints a proper traceback, whereas
        # traceback.print_exc() just prints a TB to the "yield"
        # line above...
        Failure().printTraceback(file=cfg.stderr)
        print(u"ERROR:", six.text_type(e), file=cfg.stderr)
        raise SystemExit(1)
    cfg.timing.add("exit")
    if cfg.dump_timing:
        cfg.timing.write(cfg.dump_timing, cfg.stderr)
def reversed(self):
    """Return a copy of this CubicBezier with its orientation reversed.

    The cached arc-length info is carried over (length is orientation
    invariant), with its control points reversed to match the new curve.
    """
    new_cub = CubicBezier(self.end, self.control2, self.control1, self.start)
    if self._length_info['length']:
        # BUG FIX: copy the cache dict instead of aliasing it -- the
        # original assigned ``self._length_info`` directly and then
        # mutated 'bpoints', silently corrupting the source curve's
        # cached data (contradicting "returns a copy").
        new_cub._length_info = dict(self._length_info)
        new_cub._length_info['bpoints'] = (self.end, self.control2, self.control1, self.start)
    return new_cub
def get_scheduling_block(sub_array_id, block_id):
    """Look up one scheduling block instance belonging to a sub-array.

    Returns (block_details, HTTPStatus.OK) when block_id is registered
    for the sub-array, otherwise an error dict with NOT_FOUND.
    """
    known_ids = DB.get_sub_array_sbi_ids(sub_array_id)
    if block_id not in known_ids:
        return dict(error="unknown id"), HTTPStatus.NOT_FOUND
    details = next(DB.get_block_details([block_id]))
    return details, HTTPStatus.OK
def _convert_content ( self , rdtype , content ) :
"""Converts type dependent record content into well formed and fully qualified
content for domain zone and returns content .""" | if rdtype == 'TXT' :
if content [ 0 ] != '"' :
content = '"' + content
if content [ - 1 ] != '"' :
content += '"'
if rdtype in ( 'CNAME' , 'MX' , 'NS' , 'SRV' ) :
if content [ - 1 ] != '.' :
content = self . _fqdn_name ( content )
return content |
def make_unique_endings(strings_collection):
    """Append a unique terminator character to every string.

    Essential for correct building of a generalized annotated suffix
    tree. Terminators are consecutive code points starting at
    ``consts.String.UNICODE_SPECIAL_SYMBOLS_START``; supports roughly
    1,100,000 strings.

    Returns the updated collection as a list of unicode strings.
    """
    res = []
    for i, string in enumerate(strings_collection):
        # BUG FIX (py3): ``str.decode("unicode-escape")`` only exists on
        # Python 2. chr() produces the same terminator character directly
        # and also sidesteps the old 'narrow build' \U-escape workaround.
        terminator = chr(consts.String.UNICODE_SPECIAL_SYMBOLS_START + i)
        res.append(string + terminator)
    return res
def _crossover(self, ind):
    """Used by the evolution process to generate a new individual.

    Notes
    This is a tweaked version of the classical DE crossover
    algorithm, the main difference that candidate parameters are
    generated using a lognormal distribution. Bound handling is
    achieved by resampling where the candidate solution exceeds +/-1

    Parameters
    ind : deap individual

    Returns
    y : deap individual
        An individual representing a candidate solution, to be
        assigned a fitness.
    """
    # Pick the three distinct DE donors a, b, c -- from the individual's
    # neighbourhood when one is configured, else the whole population.
    if self.neighbours:
        a, b, c = random.sample([self.population[i] for i in ind.neighbours], 3)
    else:
        a, b, c = random.sample(self.population, 3)
    y = self.toolbox.clone(a)
    y.ident = ind.ident
    y.neighbours = ind.neighbours
    # Invalidate the cloned fitness; it is re-evaluated later.
    del y.fitness.values
    # y should now be a copy of ind with the vector elements from a
    # Force at least one dimension (index ``ident``) to mutate.
    ident = random.randrange(len(self.value_means))
    for i, value in enumerate(y):
        if i == ident or random.random() < self.cxpb:
            # DE mutation with a lognormal scaling factor instead of a
            # constant differential weight.
            entry = a[i] + random.lognormvariate(-1.2, 0.5) * self.diff_weight * (b[i] - c[i])
            tries = 0
            # Bound handling: resample until the candidate lies in
            # [-1, 1]; give up after 10000 tries and keep the donor value.
            while abs(entry) > 1.0:
                tries += 1
                entry = a[i] + random.lognormvariate(-1.2, 0.5) * self.diff_weight * (b[i] - c[i])
                if tries > 10000:
                    entry = a[i]
            y[i] = entry
    return y
def notify(request):
    '''This view gets a POST request from the Javascript part of the
    AutoreloadPanel that contains a body that looks like::

    template=/full/path/to/template.html&template=/another/template.eml:123456789&
    media=/static/url/to/a/file:133456780&media=http://media.localhost.local/base.css

    It is a list of template paths and a list of URLs that are part of the
    static/media directories of the project. The filename might be followed by
    a unix-epoch timestamp of the last modified date, seperated by a colon.

    The view then blocks the response as long until one of the specified files
    has a modified-time that is newer than the specified timestamp. It will
    return a line seperated list of those changed files.

    The view might also return with an empty response and status 204 (No
    Content) if the source code that the development server runs was modified.
    This is needed to free the current thread and allow django's runserver
    command to reload the source code, to take those changes into account.
    '''
    def get_resources(names, resource_class):
        # Parse "path[:timestamp]" strings into resource_class instances;
        # a missing or unparsable timestamp becomes None.
        resources = []
        for name in names:
            timestamp = None
            if ':' in name:
                name, timestamp = name.split(':', 1)
            try:
                timestamp = float(timestamp)
            except (ValueError, TypeError):
                timestamp = None
            resources.append(resource_class(name, timestamp))
        return resources
    # NOTE(review): request.REQUEST (combined GET/POST) was removed in
    # Django 1.9 -- this view assumes an older Django; confirm before reuse.
    resources = get_resources(request.REQUEST.getlist('template'), Resource)
    resources += get_resources(request.REQUEST.getlist('media'), MediaResource)
    file_watcher = FileWatcher(resources)
    suspender = Suspender()
    updates = None
    # Long-poll loop: wake twice a second until a watched file changed.
    while not updates:
        time.sleep(0.5)
        # break the watching action and return a response to release the
        # running thread. This is necessary since the looped check would
        # prevent django from loading changed source code or quitting the
        # development server with CTRL-C
        if suspender.should_suspend():
            response = HttpResponse()
            response.status_code = 204
            return response
        updates = file_watcher.get_updated_files()
    response = HttpResponse(json.dumps([{'src': resource.name, 'mtime': resource.mtime} for resource in updates]))
    return response
def get_backup(self, id_or_uri):
    """Get the details for the backup from an Artifact Bundle.

    Args:
        id_or_uri: ID or URI of the Artifact Bundle.

    Returns:
        dict: Backup for an Artifacts Bundle.
    """
    backup_uri = '{}/{}'.format(self.BACKUPS_PATH, extract_id_from_uri(id_or_uri))
    return self._client.get(id_or_uri=backup_uri)
def setup_recent_files_menu(self):
    """Setup the recent files menu and manager.

    Creates the RecentFilesManager, builds a 'Recents' submenu wired to
    it, and inserts the submenu plus a separator just before the Save
    action in the File menu.
    """
    self.recent_files_manager = widgets.RecentFilesManager('pyQode', 'notepad')
    self.menu_recents = widgets.MenuRecentFiles(self.menuFile, title='Recents', recent_files_manager=self.recent_files_manager)
    # Selecting an entry routes through the editor's open_file slot.
    self.menu_recents.open_requested.connect(self.open_file)
    self.menuFile.insertMenu(self.actionSave, self.menu_recents)
    self.menuFile.insertSeparator(self.actionSave)
def interact_gridsearch_result_images(show_result_func, cfgdict_list, cfglbl_list, cfgresult_list, score_list=None, fnum=None, figtitle='', unpack=False, max_plots=25, verbose=True, precision=3, scorelbl='score', onclick_func=None):
    """helper function for visualizing results of gridsearch

    Draws up to ``max_plots`` subplots, one per config result (sorted by
    score when scores are given), and installs a click handler that
    prints the clicked subplot's config and optionally forwards the
    result to ``onclick_func``.
    """
    assert callable(show_result_func), 'NEED FUNCTION GOT: %r' % (show_result_func,)
    import utool as ut
    import plottool as pt
    from plottool import plot_helpers as ph
    from plottool import interact_helpers as ih
    if verbose:
        print('Plotting gridsearch results figtitle=%r' % (figtitle,))
    if score_list is None:
        score_list = [None] * len(cfgdict_list)
    else:
        # sort by score if available -- keep all four parallel lists aligned
        sortx_list = ut.list_argsort(score_list, reverse=True)
        score_list = ut.take(score_list, sortx_list)
        cfgdict_list = ut.take(cfgdict_list, sortx_list)
        cfglbl_list = ut.take(cfglbl_list, sortx_list)
        cfgresult_list = ut.take(cfgresult_list, sortx_list)
    # Dont show too many results only the top few
    score_list = ut.listclip(score_list, max_plots)
    # Show the config results
    fig = pt.figure(fnum=fnum)
    # Get plots for each of the resutls
    nRows, nCols = pt.get_square_row_cols(len(score_list), fix=True)
    next_pnum = pt.make_pnum_nextgen(nRows, nCols)
    for cfgdict, cfglbl, cfgresult, score in zip(cfgdict_list, cfglbl_list, cfgresult_list, score_list):
        if score is not None:
            cfglbl += '\n' + scorelbl + '=' + ut.repr2(score, precision=precision)
        pnum = next_pnum()
        try:
            # ``unpack`` means cfgresult is a tuple of positional args.
            if unpack:
                show_result_func(*cfgresult, fnum=fnum, pnum=pnum)
            else:
                show_result_func(cfgresult, fnum=fnum, pnum=pnum)
        except Exception as ex:
            if isinstance(cfgresult, tuple):
                # print(ut.repr4(cfgresult))
                print(ut.depth_profile(cfgresult))
                print(ut.list_type_profile(cfgresult))
            ut.printex(ex, 'error showing', keys=['cfgresult', 'fnum', 'pnum'])
            raise
        # pt.imshow(255 * cfgresult, fnum=fnum, pnum=next_pnum(), title=cfglbl)
        ax = pt.gca()
        pt.set_title(cfglbl, ax=ax)
        # , size)
        # Stash per-axes metadata so the click handler can recover it.
        ph.set_plotdat(ax, 'cfgdict', cfgdict)
        ph.set_plotdat(ax, 'cfglbl', cfglbl)
        ph.set_plotdat(ax, 'cfgresult', cfgresult)
    # Define clicked callback
    def on_clicked(event):
        print('\n[pt] clicked gridsearch axes')
        if event is None or event.xdata is None or event.inaxes is None:
            print('out of axes')
            pass
        else:
            ax = event.inaxes
            plotdat_dict = ph.get_plotdat_dict(ax)
            print(ut.repr4(plotdat_dict))
            cfglbl = ph.get_plotdat(ax, 'cfglbl', None)
            cfgdict = ph.get_plotdat(ax, 'cfgdict', {})
            cfgresult = ph.get_plotdat(ax, 'cfgresult', {})
            infostr_list = [('cfglbl = %s' % (cfglbl,)), '', ('cfgdict = ' + ut.repr4(cfgdict, sorted_=True)), ]
            # Call a user defined function if given
            if onclick_func is not None:
                if unpack:
                    onclick_func(*cfgresult)
                else:
                    onclick_func(cfgresult)
            infostr = ut.msgblock('CLICKED', '\n'.join(infostr_list))
            print(infostr)
    # Connect callbacks
    ih.connect_callback(fig, 'button_press_event', on_clicked)
    pt.set_figtitle(figtitle)
def get_transactions(self, date_from: datetime, date_to: datetime) -> List[Transaction]:
    """Return the account's transactions posted between the two dates.

    Both bounds are inclusive: date_from is widened to the start of its
    day and date_to to the end of its day before querying.

    Raises AssertionError if either argument is not a datetime (note:
    asserts are stripped under ``python -O``).
    """
    assert isinstance(date_from, datetime)
    assert isinstance(date_to, datetime)
    # fix up the parameters as we need datetime
    # Datum normalizes the bounds to whole-day boundaries.
    dt_from = Datum()
    dt_from.from_datetime(date_from)
    dt_from.start_of_day()
    dt_to = Datum()
    dt_to.from_datetime(date_to)
    dt_to.end_of_day()
    # Join through Split so only transactions touching this account match.
    query = (self.book.session.query(Transaction).join(Split).filter(Split.account_guid == self.account.guid).filter(Transaction.post_date >= dt_from.date, Transaction.post_date <= dt_to.date).order_by(Transaction.post_date))
    return query.all()
def numeric_function_clean_dataframe(self, axis):
    """Preprocesses numeric functions to clean dataframe and pick numeric indices.

    Args:
        axis: '0' if columns and '1' if rows.

    Returns:
        Tuple with return value (if any), indices to apply func to & cleaned Manager.
        When ``result`` is not None the caller can short-circuit with it.
    """
    result = None
    query_compiler = self
    # If no numeric columns and over columns, then return empty Series
    if not axis and len(self.index) == 0:
        result = pandas.Series(dtype=np.int64)
    # Columns with non-numeric dtypes cannot take part in numeric ops.
    nonnumeric = [col for col, dtype in zip(self.columns, self.dtypes) if not is_numeric_dtype(dtype)]
    if len(nonnumeric) == len(self.columns):
        # If over rows and no numeric columns, return this
        if axis:
            result = pandas.Series([np.nan for _ in self.index])
        else:
            result = pandas.Series([0 for _ in self.index])
    else:
        # Drop the non-numeric columns and continue with the remainder.
        query_compiler = self.drop(columns=nonnumeric)
    return result, query_compiler
def add(self, elt):
    """Generic function to add objects to the daemon internal lists.
    Manage Broks, External commands

    Appends to the matching internal queue under its lock and bumps the
    corresponding statsd counter; unknown types are only logged.

    :param elt: objects to add
    :type elt: alignak.AlignakObject
    :return: None
    """
    if isinstance(elt, Brok):
        # For brok, we tag the brok with our instance_id
        elt.instance_id = self.instance_id
        if elt.type == 'monitoring_log':
            # The brok is a monitoring event
            with self.events_lock:
                self.events.append(elt)
            statsmgr.counter('events', 1)
        else:
            with self.broks_lock:
                self.broks.append(elt)
            statsmgr.counter('broks.added', 1)
    elif isinstance(elt, ExternalCommand):
        logger.debug("Queuing an external command '%s'", str(elt.__dict__))
        with self.external_commands_lock:
            self.external_commands.append(elt)
        statsmgr.counter('external-commands.added', 1)
    else:  # pragma: no cover, simple dev alerting
        logger.error('Do not manage object type %s (%s)', type(elt), elt)
def vm_ip(cls, vm_id):
    """Return the first usable ip address for this vm.

    Skips interfaces marked 'private' and returns a (version, ip) tuple
    from the first remaining interface; returns None when the vm has no
    non-private address.
    """
    details = cls.info(vm_id)
    for interface in details['ifaces']:
        if interface['type'] == 'private':
            continue
        for address in interface['ips']:
            return address['version'], address['ip']
def txn2data(self, txn: dict) -> str:
    """Given ledger transaction, return its data json.

    :param txn: transaction as dict
    :return: transaction data json
    """
    result = txn['result']
    if self == Protocol.V_13:
        # Legacy node format keeps the payload directly under 'data';
        # "data": null for no such txn.
        data = result.get('data', {})
    else:
        data = (result.get('data', {}) or {}).get('txn', {})
    return json.dumps(data)
def p_joinx(self, t):
    # todo: support join types http://www.postgresql.org/docs/9.4/static/queries-table-expressions.html#QUERIES-JOIN
    # NOTE: in PLY, the docstring below IS the grammar specification for
    # this rule -- do not edit it for style.
    """joinx : fromtable jointype fromtable
             | fromtable jointype fromtable kw_on expression
             | fromtable jointype fromtable kw_using '(' namelist ')'"""
    # len(t) == 4: bare JOIN with no join condition.
    if len(t) == 4:
        t[0] = JoinX(t[1], t[3], None, t[2])
    # len(t) == 6: JOIN ... ON <expression>.
    elif len(t) == 6:
        t[0] = JoinX(t[1], t[3], t[5], t[2])
    else:
        raise NotImplementedError('todo: join .. using')
def editLogSettings(self, logLevel="WARNING", logDir=None, maxLogFileAge=90, maxErrorReportsCount=10):
    """The log settings are for the entire site.

    Inputs:
        logLevel - Can be one of [OFF, SEVERE, WARNING, INFO, FINE,
                   VERBOSE, DEBUG].
        logDir - File path to the root of the log directory
        maxLogFileAge - number of days that a server should save a log
                        file.
        maxErrorReportsCount - maximum number of error report files
                               per machine

    NOTE(review): an unrecognized logLevel is silently ignored (current
    level kept) rather than raising -- confirm this is intended.
    """
    lURL = self._url + "/settings/edit"
    allowed_levels = ("OFF", "SEVERE", "WARNING", "INFO", "FINE", "VERBOSE", "DEBUG")
    # Start from the current settings so unspecified fields are preserved.
    currentSettings = self.logSettings
    currentSettings["f"] = "json"
    if logLevel.upper() in allowed_levels:
        currentSettings['logLevel'] = logLevel.upper()
    if logDir is not None:
        currentSettings['logDir'] = logDir
    # NOTE(review): isinstance(x, int) also accepts bools.
    if maxLogFileAge is not None and isinstance(maxLogFileAge, int):
        currentSettings['maxLogFileAge'] = maxLogFileAge
    if maxErrorReportsCount is not None and isinstance(maxErrorReportsCount, int) and maxErrorReportsCount > 0:
        currentSettings['maxErrorReportsCount'] = maxErrorReportsCount
    return self._post(url=lURL, param_dict=currentSettings, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
def TypeFactory(v):
    """Ensure `v` is a valid Type.

    This function is used to convert user-specified types into
    internal types for the verification engine. It allows Type
    subclasses, Type subclass instances, Python type, and user-defined
    classes to be passed. Returns an instance of the type of `v`.

    Users should never access this function directly.

    Raises InvalidTypeError for any value that is neither None, a Type
    instance, nor a class.
    """
    if v is None:
        return Nothing()
    if isinstance(v, Type):
        return v
    if isinstance(v, type):
        # ``v`` is a class: Type subclasses get instantiated, any other
        # class is wrapped in Generic.
        if issubclass(v, Type):
            return v()
        return Generic(v)
    # BUG FIX: the original reached issubclass(v, Type) for non-class
    # values (e.g. TypeFactory(5)), which raises TypeError instead of
    # the documented InvalidTypeError.
    raise InvalidTypeError("Invalid type %s" % v)
def view(self, *args, **kwargs):
    """Decorator that wraps a function with as_view and registers it.

    Defaults the ``view_class`` keyword to this registry's view_class.
    """
    def decorator(func):
        kwargs.setdefault("view_class", self.view_class)
        wrapped = as_view(*args, **kwargs)(func)
        return self.add_view(wrapped)
    return decorator
def user_requests(self, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/requests#list-requests"
    # Build the per-user requests endpoint and delegate to the generic caller.
    endpoint = "/api/v2/users/{id}/requests.json".format(id=id)
    return self.call(endpoint, **kwargs)
def save_module(self, obj):
    """Pickle a module: by import reference, or by value for dynamic modules.

    A module carrying ``__file__`` was loaded from disk and is importable
    on the unpickling side; otherwise we probe the import machinery, and
    an unfindable module is serialized as name + attribute dict.
    """
    mod_name = obj.__name__
    if hasattr(obj, '__file__'):
        is_dynamic = False
    else:
        try:
            _find_module(mod_name)
        except ImportError:
            is_dynamic = True
        else:
            is_dynamic = False
    self.modules.add(obj)
    if is_dynamic:
        self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)), obj=obj)
    else:
        self.save_reduce(subimport, (obj.__name__,), obj=obj)
def _wcut ( l , windowsize , stepsize ) :
"""Parameters
l - The length of the input array
windowsize - the size of each window of samples
stepsize - the number of samples to move the window each step
Returns
The length the input array should be so that leftover samples are ignored""" | end = l - windowsize
if l <= windowsize :
return 0 , 0
elif end % stepsize :
l = windowsize + ( ( end // stepsize ) * stepsize )
return l , l - ( windowsize - stepsize ) |
def write_to(self, group, append=False):
    """Write stored features to a given h5 group.

    When ``append`` is true the existing dataset is grown and the new
    frames are written after the current ones; otherwise the dataset is
    resized to exactly the new data and overwritten.
    """
    if self.sparsetodense:
        # Densify any scipy.sparse blocks before concatenation.
        self.data = [x.todense() if sp.issparse(x) else x for x in self.data]
    nframes = sum(d.shape[0] for d in self.data)
    dim = self._group_dim(group)
    feats = np.concatenate(self.data, axis=0)
    if append:
        nframes_group = group[self.name].shape[0]
        group[self.name].resize(nframes_group + nframes, axis=0)
        if dim == 1:
            group[self.name][nframes_group:] = feats
        else:
            group[self.name][nframes_group:, :] = feats
    else:
        group[self.name].resize(nframes, axis=0)
        # FIX: the original wrote ``feats if dim == 1 else feats`` -- both
        # branches identical. Full-slice assignment works for any rank,
        # so just write feats.
        group[self.name][...] = feats
def delegate(self, fn, *args, **kwargs):
    """Run fn(*args, **kwargs) on the sub-executor and return an asyncio future."""
    bound = functools.partial(fn, *args, **kwargs)
    pending = self.loop.run_in_executor(self.subexecutor, bound)
    return asyncio.ensure_future(pending)
def strip_key_strings(pofile):
    """Remove, in place, every PO entry whose msgid is a key string.

    Key-string entries should appear only in messages.po, never in any
    other po file.
    """
    kept = [entry for entry in pofile if not is_key_string(entry.msgid)]
    # Slice-assign so the caller's object is mutated, not rebound.
    pofile[:] = kept
def send_data(self, endpoint=None, **kwargs):
    """Sends data to the API.

    This call is similar to ``fetch``, but **sends** data to the API instead
    of retrieving it.

    Returned data will appear in the ``items`` key of the resulting
    dictionary.

    Sending data **requires** that the ``token`` is set.

    :param endpoint: (string) **(Required)** The API end point being called. Available endpoints are listed on
        the official `API Documentation <https://github.com/Charcoal-SE/metasmoke/wiki/API-Documentation>`__.
        If no end point is passed, a ``ValueError`` will be raised
    :param kwargs: Parameters accepted by individual endpoints. These parameters
        **must** be named the same as described in the endpoint documentation
    :rtype: (dictionary) A dictionary containing wrapper data regarding the API call
        and the results of the call in the `items` key. If multiple
        pages were received, all of the results will appear in the
        ``items`` tag.
    """
    if not endpoint:
        raise ValueError('No end point provided.')
    if not self.token:
        raise ValueError('A write token has not been set. This is required for all MetaSmoke API routes. This can\n' 'be set by setting the "token" parameter of your SmokeAPI object.')
    self._endpoint = endpoint
    params = {"key": self._api_key, "token": self.token}
    # NOTE(review): ``ids`` is assembled here but never used afterwards;
    # presumably it was meant to be formatted into the URL -- confirm.
    if 'ids' in kwargs:
        ids = ';'.join(str(x) for x in kwargs['ids'])
        kwargs.pop('ids', None)
    else:
        ids = None
    params.update(kwargs)
    data = []
    base_url = "{}{}/".format(self._base_url, endpoint)
    response = requests.post(base_url, data=params, proxies=self.proxy)
    self._previous_call = response.url
    response = response.json()
    try:
        code = response["error_code"]
        name = response["error_name"]
        message = response["error_message"]
        raise SmokeAPIError(self._previous_call, code, name, message)
    except KeyError:
        pass
        # This means there is no error
    data.append(response)
    r = []
    for d in data:
        r.extend(d['items'])
    items = list(chain(r))
    # NOTE(review): params['page'] raises KeyError when the caller did not
    # pass a 'page' kwarg -- confirm whether a default should be injected.
    result = {'has_more': data[-1]['has_more'], 'page': params['page'], 'total': len(items), 'items': items}
    return result
def log_flush_for_interval(self, log_type, interval):
    """Flush logs for an interval of time.

    Args:
        log_type (str): Only documented type is "policies". This
            will be applied by default if nothing is passed.
        interval (str): Combination of "Zero", "One", "Two",
            "Three", "Six", and "Day", "Week", "Month", "Year", e.g.
            ("Three+Months"). Pluralization and capitalization do not
            appear to matter; spaces are accepted in place of '+'.
            No validation is performed prior to the request.

    Raises:
        JSSDeleteError if provided url_path has a >= 400 response.
    """
    log_type = log_type or "policies"
    # The endpoint wants '+' separators; accept spaces for convenience.
    interval = interval.replace(" ", "+")
    flush_url = "{}/{}/interval/{}".format(self.url, log_type, interval)
    self.jss.delete(flush_url)
def get_commands_from(self, args):
    """Collect nesting-level command values from a parsed namespace.

    Attribute names are generated from ``self.arg_label_fmt`` for depth
    0, 1, 2, ... until the namespace runs out of them.
    """
    commands = []
    depth = 0
    while True:
        try:
            commands.append(getattr(args, self.arg_label_fmt % depth))
        except AttributeError:
            return commands
        depth += 1
async def expose(self, application):
    """Expose an application so its ports are reachable.

    :param application string:
        Application holds the placeholder name of the application that must
        be exposed.
    """
    # Map the placeholder name to the deployed application name.
    application = self.resolve(application)
    log.info('Exposing %s', application)
    return await self.model.applications[application].expose()
def channel_names(self, usecols=None):
    """Attempt to extract the channel names from the data
    file. Return a list with names. Return None on failed attempt.

    usecols: A list with columns to use. If present, the returned
    list will include only names for columns requested. It will
    align with the columns returned by numpys loadtxt by using the
    same keyword (usecols).

    Raises IndexError when usecols requests a column index beyond the
    number of data fields.
    """
    # Search from [rts - 1] and up (last row before data). Split respective
    # row on datdel. Accept consecutive elements starting with alphas
    # character after strip. If the count of elements equals the data count
    # on row rts + 1, accept it as the channel names.
    # matches_p / matches_c hold per-row data-field counts for '.' and ','
    # decimal delimiters respectively; rts is the index of the first data row.
    if self.decdel == '.':
        datcnt = self.matches_p[self.rts]
    elif self.decdel == ',':
        datcnt = self.matches_c[self.rts]
    if usecols and max(usecols) >= datcnt:
        mess = ' Max column index is '
        raise IndexError(str(usecols) + mess + str(datcnt - 1))
    names = None
    if not self.rts:
        # Only data.
        return None
    # From last row before data and up.
    for row in self.rows[self.rts - 1::-1]:
        # datdel might be None, (whitespace)
        splitlist = row.split(self.datdel)
        for i, word in enumerate(splitlist):
            if not word.strip().startswith(ALPHAS):
                # A non-alphabetic field disqualifies this row.
                break
            elif i + 1 == datcnt:
                # Accept
                names = [ch.strip() for ch in splitlist[:datcnt]]
                break
        if names:
            break
    if usecols:
        # Align with numpy.loadtxt's usecols ordering.
        names = [names[i] for i in sorted(usecols)]
    return names
def stop_proxying(self, signal_source, *signal_names, weak_ref=False):
    """:meth:`.WSignalProxyProto.stop_proxying` implementation"""
    # Choose the strong- or weak-reference callback variant.
    if weak_ref is False:
        callback = self.__callback
    else:
        callback = self.__weak_ref_callback
    for name in signal_names:
        signal_source.remove_callback(name, callback)
def exception(  # type: ignore
        self, msg, *args, exc_info=True, **kwargs) -> Task:
    """Convenience method for logging an ERROR with exception information.

    Equivalent to :meth:`error` with ``exc_info`` defaulting to True.
    """
    kwargs['exc_info'] = exc_info
    return self.error(msg, *args, **kwargs)
def xpathNextParent(self, cur):
    """Traversal function for the "parent" direction The parent
    axis contains the parent of the context node, if there is
    one.

    cur: the current node wrapper (or None); returns the parent wrapped
    as an xmlNode. Raises xpathError when libxml2 returns NULL.
    """
    if cur is None:
        cur__o = None
    else:
        # Unwrap to the raw libxml2 object pointer.
        cur__o = cur._o
    ret = libxml2mod.xmlXPathNextParent(self._o, cur__o)
    if ret is None:
        raise xpathError('xmlXPathNextParent() failed')
    # Re-wrap the raw node in the Python binding class.
    __tmp = xmlNode(_obj=ret)
    return __tmp
def remove_dups(head):
    """Remove, in place, nodes with already-seen values from a linked list.

    Each node must expose ``val`` and ``next``. The first occurrence of
    each value is kept; later duplicates are unlinked.

    Time Complexity: O(N)
    Space Complexity: O(N)
    """
    seen = set()
    # FIX: start prev at None instead of constructing a dummy Node() --
    # the first node can never be a duplicate (seen is empty), so prev is
    # always assigned before it is used, and the function no longer
    # depends on Node being constructible with no arguments.
    prev = None
    while head:
        if head.val in seen:
            prev.next = head.next
        else:
            seen.add(head.val)
            prev = head
        head = head.next
def save_session(self, sid, session, namespace=None):
    """Store the user session for a client.

    Identical to :func:`socketio.Server.save_session`, except that when
    ``namespace`` is omitted the namespace associated with this class is
    used instead.
    """
    target_namespace = namespace or self.namespace
    return self.server.save_session(sid, session, namespace=target_namespace)
def set_replication_enabled(status, host=None, core_name=None):
    '''MASTER ONLY
    Sets the master to ignore poll requests from the slaves. Useful when you
    don't want the slaves replicating during indexing or when clearing the
    index.

    status : boolean
        Sets the replication status to the specified state.
    host : str (None)
        The solr host to query. __opts__['host'] is default.
    core_name : str (None)
        The name of the solr core if using cores. Leave this blank if you are
        not using cores or if you want to set the status on all cores.

    Return: dict<str, obj>::

        {'success': boolean, 'data': dict, 'errors': list, 'warnings': list}

    CLI Example:

    .. code-block:: bash

        salt '*' solr.set_replication_enabled false, None, music
    '''
    if not _is_master() and _get_none_or_value(host) is None:
        return _get_return_dict(False, errors=['Only minions configured as master can run this'])
    cmd = 'enablereplication' if status else 'disablereplication'
    if _get_none_or_value(core_name) is None and _check_for_cores():
        # No core given but cores are configured: recurse over every core
        # and merge the per-core results into one return dict.
        ret = _get_return_dict()
        success = True
        for name in __opts__['solr.cores']:
            resp = set_replication_enabled(status, host, name)
            if not resp['success']:
                success = False
            data = {name: {'data': resp['data']}}
            ret = _update_return_dict(ret, success, data, resp['errors'], resp['warnings'])
        return ret
    # FIX: the original ended with ``if status: ... else: ...`` where both
    # branches issued the identical request; the enable/disable choice is
    # already encoded in ``cmd``, so a single call suffices.
    return _replication_request(cmd, host=host, core_name=core_name)
def SRU_Compute_CPU(activation_type, d, bidirectional=False, scale_x=1):
    """CPU version of the core SRU computation.

    Has the same interface as SRU_Compute_GPU() but is a regular Python function
    instead of a torch.autograd.Function because we don't implement backward()
    explicitly.

    activation_type: 0 = identity, 1 = tanh, 2 = relu
    d: hidden dimension per direction
    bidirectional: process the sequence in both directions when True
    scale_x: scaling applied to the highway input when k == 3
    """
    def sru_compute_cpu(u, x, bias, init=None, mask_h=None):
        # u: projected inputs; x: raw inputs; bias: forget/reset biases;
        # init: optional initial cell state; mask_h: optional dropout mask.
        bidir = 2 if bidirectional else 1
        length = x.size(0) if x.dim() == 3 else 1
        batch = x.size(-2)
        # k = number of projections per hidden unit (3, or 4 when the
        # highway input is projected too).
        k = u.size(-1) // d // bidir
        if mask_h is None:
            mask_h = 1
        u = u.view(length, batch, bidir, d, k)
        x_tilde = u[..., 0]
        forget_bias, reset_bias = bias.view(2, bidir, d)
        forget = (u[..., 1] + forget_bias).sigmoid()
        reset = (u[..., 2] + reset_bias).sigmoid()
        if k == 3:
            # Highway connection uses x itself (optionally rescaled).
            x_prime = x.view(length, batch, bidir, d)
            x_prime = x_prime * scale_x if scale_x != 1 else x_prime
        else:
            x_prime = u[..., 3]
        h = Variable(x.data.new(length, batch, bidir, d))
        if init is None:
            c_init = Variable(x.data.new(batch, bidir, d).zero_())
        else:
            c_init = init.view(batch, bidir, d)
        c_final = []
        for di in range(bidir):
            # Direction 0 walks t = 0..L-1; direction 1 walks L-1..0.
            if di == 0:
                time_seq = range(length)
            else:
                time_seq = range(length - 1, -1, -1)
            c_prev = c_init[:, di, :]
            for t in time_seq:
                # Cell recurrence: c_t = f*c_{t-1} + (1-f)*x_tilde.
                c_t = (c_prev - x_tilde[t, :, di, :]) * forget[t, :, di, :] + x_tilde[t, :, di, :]
                c_prev = c_t
                if activation_type == 0:
                    g_c_t = c_t
                elif activation_type == 1:
                    g_c_t = c_t.tanh()
                elif activation_type == 2:
                    g_c_t = nn.functional.relu(c_t)
                else:
                    assert False, 'Activation type must be 0, 1, or 2, not {}'.format(activation_type)
                # Highway output: h = r*(g(c)*mask) + (1-r)*x'.
                h[t, :, di, :] = (g_c_t * mask_h - x_prime[t, :, di, :]) * reset[t, :, di, :] + x_prime[t, :, di, :]
            c_final.append(c_t)
        return h.view(length, batch, -1), torch.stack(c_final, dim=1).view(batch, -1)
    return sru_compute_cpu
def serialize_all(nodes, stream=None, Dumper=Dumper, canonical=None, indent=None,
                  width=None, allow_unicode=None, line_break=None, encoding=None,
                  explicit_start=None, explicit_end=None, version=None, tags=None):
    """Serialize a sequence of representation trees into a YAML stream.

    If *stream* is None, the produced document is returned instead: a
    str when *encoding* is None, otherwise the encoded bytes.
    """
    getvalue = None
    if stream is None:
        # No destination supplied: buffer in memory and hand the result back.
        stream = io.StringIO() if encoding is None else io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
                    allow_unicode=allow_unicode, line_break=line_break,
                    encoding=encoding, version=version, tags=tags,
                    explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        # Always release the dumper, even if serialization raised.
        dumper.dispose()
    if getvalue:
        return getvalue()
def _extract_email ( data ) :
"""{ ' elements ' : [ { ' handle ' : ' urn : li : emailAddress : 319371470 ' ,
' handle ~ ' : { ' emailAddress ' : ' raymond . penners @ intenct . nl ' } } ] }""" | ret = ''
elements = data . get ( 'elements' , [ ] )
if len ( elements ) > 0 :
ret = elements [ 0 ] . get ( 'handle~' , { } ) . get ( 'emailAddress' , '' )
return ret |
def migrate(self, host, port, key, destination_db, timeout, copy=False, replace=False):
    """Atomically transfer a key from a source Redis instance to a
    destination Redis instance. On success the key is deleted from the
    original instance and is guaranteed to exist in the target instance.

    The command is atomic and blocks the two instances for the time
    required to transfer the key; at any given time the key will appear
    to exist in one instance or the other, unless a timeout error occurs.

    .. note::
        **Time complexity**: This command actually executes a DUMP+DEL in
        the source instance, and a RESTORE in the target instance. See the
        pages of these commands for time complexity. Also an ``O(N)`` data
        transfer between the two instances is performed.

    :param host: The host to migrate the key to
    :type host: bytes, str
    :param int port: The port to connect on
    :param key: The key to migrate
    :type key: bytes, str
    :param int destination_db: The database number to select
    :param int timeout: The maximum idle time in milliseconds
    :param bool copy: Do not remove the key from the local instance
    :param bool replace: Replace existing key on the remote instance
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [
        b'MIGRATE',
        host,
        ascii(port).encode('ascii'),
        key,
        ascii(destination_db).encode('ascii'),
        ascii(timeout).encode('ascii'),
    ]
    # Optional modifiers; appended only on an explicit boolean True,
    # matching the original `is True` semantics.
    for token, enabled in ((b'COPY', copy), (b'REPLACE', replace)):
        if enabled is True:
            command.append(token)
    return self._execute(command, b'OK')
def from_xml(cls, xml_bytes):
    """Create an instance of this from XML bytes.

    @param xml_bytes: C{str} bytes of XML to parse
    @return: an instance of L{MultipartInitiationResponse}
    """
    root = XML(xml_bytes)
    bucket = root.findtext('Bucket')
    key = root.findtext('Key')
    upload_id = root.findtext('UploadId')
    return cls(bucket, key, upload_id)
def standard_program_header(self, title, length, line=32768):
    """Generates a standard header block of PROGRAM type.

    :param title: name recorded in the header
    :param length: data length; stored as param2 as well
    :param line: value stored as param1 (default 32768)
    """
    self.save_header(
        self.HEADER_TYPE_BASIC,
        title,
        length,
        param1=line,
        param2=length,
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.