| signature (string, lengths 29–44.1k) | implementation (string, lengths 0–85.2k) |
|---|---|
def memoize(f):
    """Memoization decorator for a function taking one or more arguments.

    Results are cached on ``f.cache``, keyed by the positional arguments and
    the keyword arguments (names *and* values). All arguments must be
    hashable.
    """
    f.cache = {}

    @wraps(f)
    def _c(*args, **kwargs):
        # Bug fix: the previous key used ``tuple(kwargs)``, which captures
        # only the keyword *names*, so calls differing solely in keyword
        # values shared one cache entry. Sorting the items also makes the
        # key independent of keyword order.
        key = (args, tuple(sorted(kwargs.items())))
        if key not in f.cache:
            f.cache[key] = f(*args, **kwargs)
        return f.cache[key]

    return _c
|
def _convert_xml_to_queue_messages(response, decode_function):
    '''Parse a queue-service XML response body into a list of ``QueueMessage``.

    Expected payload shape::

        <?xml version="1.0" encoding="utf-8"?>
        <QueueMessagesList>
          <QueueMessage>
            <MessageId>string-message-id</MessageId>
            <InsertionTime>insertion-time</InsertionTime>
            <ExpirationTime>expiration-time</ExpirationTime>
            <PopReceipt>opaque-string-receipt-data</PopReceipt>
            <TimeNextVisible>time-next-visible</TimeNextVisible>
            <DequeueCount>integer</DequeueCount>
            <MessageText>message-body</MessageText>
          </QueueMessage>
        </QueueMessagesList>

    :param response: HTTP response object with a ``body`` attribute; returned
        unchanged when it (or its body) is None.
    :param decode_function: callable applied to the raw MessageText to produce
        ``message.content``.
    :return: list of ``QueueMessage`` objects, or the original ``response``
        when there is nothing to parse.
    '''
    if response is None or response.body is None:
        return response
    messages = list()
    list_element = ETree.fromstring(response.body)
    for message_element in list_element.findall('QueueMessage'):
        message = QueueMessage()
        message.id = message_element.findtext('MessageId')
        # NOTE(review): DequeueCount is kept as the raw text (a string), not
        # converted to int — confirm callers expect that.
        message.dequeue_count = message_element.findtext('DequeueCount')
        message.content = decode_function(message_element.findtext('MessageText'))
        # Timestamps are parsed with dateutil's ``parser.parse``.
        message.insertion_time = parser.parse(message_element.findtext('InsertionTime'))
        message.expiration_time = parser.parse(message_element.findtext('ExpirationTime'))
        message.pop_receipt = message_element.findtext('PopReceipt')
        # TimeNextVisible is optional in the payload, hence find() + guard.
        time_next_visible = message_element.find('TimeNextVisible')
        if time_next_visible is not None:
            message.time_next_visible = parser.parse(time_next_visible.text)
        # Add message to list
        messages.append(message)
    return messages
|
def codemirror_field_js_bundle(field):
    """Filter returning the CodeMirror Javascript bundle name for a field.

    Example:
        {% load djangocodemirror_tags %}
        {{ form.myfield|codemirror_field_js_bundle }}

    Arguments:
        field (django.forms.fields.Field): A form field whose widget is a
            :class:`djangocodemirror.widget.CodeMirrorWidget`.

    Raises:
        CodeMirrorFieldBundleError: If the CodeMirror configuration for the
            field does not define a bundle name.

    Returns:
        string: Bundle name to load with webassets.
    """
    manifesto = CodemirrorAssetTagRender()
    manifesto.register_from_fields(field)
    bundle_names = manifesto.js_bundle_names()
    if not bundle_names:
        msg = ("Given field with configuration name '{}' does not have a "
               "Javascript bundle name")
        raise CodeMirrorFieldBundleError(msg.format(field.config_name))
    return bundle_names[0]
|
def change_volume(self, increment):
    """Adjust the playback volume.

    An ``increment`` of 1 raises the volume by 5; any other value lowers it
    by 5. The resulting volume is clamped to the range [0, 100].
    """
    delta = 5 if increment == 1 else -5
    self.volume = min(max(self.volume + delta, 0), 100)
|
def remove_entity_tags(self):
    '''Return a new TermDocumentMatrix stripped of spaCy entity-tag terms.

    Returns:
        A new TermDocumentMatrix consisting of only the terms in the current
        matrix that are not spaCy entity tags.

    Note: Used if entity types are censored using
    FeatsFromSpacyDoc(tag_types_to_censor=...).
    '''
    def _contains_entity_tag(term):
        return any(token in SPACY_ENTITY_TAGS for token in term.split())

    doomed_terms = [term for term in self._term_idx_store._i2val
                    if _contains_entity_tag(term)]
    return self.remove_terms(doomed_terms)
|
def _ObjectFactory(obj):
    """Parse a python ``{name: schema}`` dict as an :py:class:`Object` instance.

    - A property name prepended by "+" is required
    - A property name prepended by "?" is optional
    - Any other property is required if :py:attr:`Object.REQUIRED_PROPERTIES`
      is True, else it's optional

    Non-dict inputs yield ``None``.
    """
    if not isinstance(obj, dict):
        return None
    optional, required = {}, {}
    for name, schema in iteritems(obj):
        if name.startswith("+"):
            required[name[1:]] = schema
        elif name.startswith("?"):
            optional[name[1:]] = schema
        elif Object.REQUIRED_PROPERTIES:
            required[name] = schema
        else:
            optional[name] = schema
    return Object(optional, required)
|
def explain_image(self, labels, instance, column_name=None, num_features=100000, num_samples=300, batch_size=200, hide_color=0):
    """Explain an image prediction with LIME.

    It analyzes the prediction by LIME and returns a report of which areas
    are most impactful in contributing to certain labels.

    Args:
      labels: a list of labels to explain.
      instance: the prediction instance. It needs to conform to model's input.
          Can be a csv line string, or a dict.
      column_name: which image column to explain. Can be None if there is only
          one image column in the model input.
      num_features: maximum number of areas (features) to analyze. Passed to
          LIME LimeImageExplainer directly.
      num_samples: size of the neighborhood to learn the linear model. Passed
          to LIME LimeImageExplainer directly.
      batch_size: size of batches passed to predict_fn. Passed to LIME
          LimeImageExplainer directly.
      hide_color: the color used to perturb images. Passed to LIME
          LimeImageExplainer directly.
    Returns:
      A LIME lime.explanation.Explanation.
    Throws:
      ValueError if the given image column is not found in model input or
      column_name is None but there are multiple image columns in model input.
    """
    from lime.lime_image import LimeImageExplainer
    # Resolve which image column to explain; ambiguity or a bad name is fatal.
    if len(self._image_columns) > 1 and not column_name:
        raise ValueError('There are multiple image columns in the input of the model. ' +
                         'Please specify "column_name".')
    elif column_name and column_name not in self._image_columns:
        raise ValueError('Specified column_name "%s" not found in the model input.' % column_name)
    image_column_name = column_name if column_name else self._image_columns[0]
    # A csv line is converted to a dict keyed by the model's headers.
    if isinstance(instance, six.string_types):
        instance = next(csv.DictReader([instance], fieldnames=self._headers))
    predict_fn = self._make_image_predict_fn(labels, instance, image_column_name)
    explainer = LimeImageExplainer()
    # The column value is treated as a path readable by TensorFlow's file_io
    # (local or GCS). The image is downscaled in place to at most 299x299.
    with file_io.FileIO(instance[image_column_name], 'rb') as fi:
        im = Image.open(fi)
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (use
        # Image.LANCZOS) — confirm the pinned Pillow version.
        im.thumbnail((299, 299), Image.ANTIALIAS)
        rgb_im = np.asarray(im.convert('RGB'))
    exp = explainer.explain_instance(rgb_im, predict_fn, labels=range(len(labels)),
                                     top_labels=None, hide_color=hide_color,
                                     num_features=num_features,
                                     num_samples=num_samples, batch_size=batch_size)
    return exp
|
def destroy(self, si, logger, session, vcenter_data_model, vm_uuid, vm_name, reservation_id):
    """Destroy a deployed VM and remove its CloudShell resource.

    :param si:
    :param logger:
    :param CloudShellAPISession session:
    :param vcenter_data_model:
    :param vm_uuid:
    :param str vm_name: This is the resource name
    :param reservation_id:
    :return: True when the VM was destroyed or was already gone, otherwise
        the result of ``destroy_vm``
    """
    # Drop any visual connectors attached to this resource first.
    self._disconnect_all_my_connectors(session=session, resource_name=vm_name,
                                       reservation_id=reservation_id, logger=logger)
    # Locate the VM in vCenter by its UUID.
    vm = self.pv_service.find_by_uuid(si, vm_uuid)
    if vm is None:
        # VM already gone: still remove the CloudShell resource below.
        logger.info("Could not find the VM {0},will remove the resource.".format(vm_name))
        destroy_result = True
    else:
        destroy_result = self.pv_service.destroy_vm(vm=vm, logger=logger)
    # Remove the CloudShell resource regardless of the VM lookup outcome.
    self.resource_remover.remove_resource(session=session, resource_full_name=vm_name)
    return destroy_result
|
def get_family_query_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the family query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.relationship.FamilyQuerySession) - a
            ``FamilyQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_family_query()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_family_query()`` is ``true``.*
    """
    if not self.supports_family_query():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        # The sessions module is unavailable in this deployment.
        raise OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        return sessions.FamilyQuerySession(proxy=proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
|
def iter_alignments(dataset, cognate_sets, column='Segments', method='library'):
    """Function computes automatic alignments and writes them to file.

    NOTE(review): despite the summary above, this function appears to mutate
    the ``cognate_sets`` records in place (adding ``Alignment`` and
    ``Alignment_Method`` keys) rather than writing a file — confirm and fix
    the summary upstream.

    :param dataset: a CLDF dataset, or an already-parsed
        ``lingpy.basic.parser.QLCParser`` wordlist.
    :param cognate_sets: iterable of cognate records (dict-like rows).
    :param column: name of the segments column (CLDF branch only).
    :param method: alignment method passed to ``lingpy``.
    """
    if not isinstance(dataset, lingpy.basic.parser.QLCParser):
        # CLDF branch: convert the dataset to a lingpy wordlist and derive
        # cognate ids from the supplied cognate_sets (0 = no cognate set).
        wordlist = _cldf2wordlist(dataset)
        cognates = {r['Form_ID']: r for r in cognate_sets}
        wordlist.add_entries(
            'cogid', 'lid',
            lambda x: cognates[x]['Cognateset_ID'] if x in cognates else 0)
        alm = lingpy.Alignments(
            wordlist, ref='cogid', row='parameter_id', col='language_id',
            segments=column.lower())
        alm.align(method=method)
        # Copy each computed alignment back onto its cognate record.
        for k in alm:
            if alm[k, 'lid'] in cognates:
                cognate = cognates[alm[k, 'lid']]
                cognate['Alignment'] = alm[k, 'alignment']
                cognate['Alignment_Method'] = method
    else:
        # Wordlist branch: the dataset already carries cognate ids.
        alm = lingpy.Alignments(dataset, ref='cogid')
        alm.align(method=method)
        for cognate in cognate_sets:
            idx = cognate['ID'] or cognate['Form_ID']
            cognate['Alignment'] = alm[int(idx), 'alignment']
            # Method label differs from the CLDF branch ('SCA-' prefix).
            cognate['Alignment_Method'] = 'SCA-' + method
|
def main():
    """Sample usage for this python module.

    Parses a CLI command (``ip``, ``device`` or ``prop``) plus its supporting
    arguments and prints the requested deployment information.

    :return: None on success, 1 on usage errors
    """
    parser = argparse.ArgumentParser(description='cons3rt deployment CLI')
    parser.add_argument('command', help='Command for the deployment CLI')
    parser.add_argument('--network', help='Name of the network')
    parser.add_argument('--name', help='Name of a deployment property to get')
    args = parser.parse_args()
    valid_commands = ['ip', 'device', 'prop']
    valid_commands_str = ','.join(valid_commands)
    # Get the command
    command = args.command.strip().lower()
    # Ensure the command is valid
    if command not in valid_commands:
        print('Invalid command found [{c}]\n'.format(c=command) + valid_commands_str)
        return 1
    if command == 'ip':
        if not args.network:
            print('Missed arg: --network, for the name of the network')
            # Bug fix: this branch previously fell through without signalling
            # the usage error like the 'device' and 'prop' branches do.
            return 1
        # NOTE(review): no IP lookup is performed for a valid 'ip' command —
        # this looks like missing logic; confirm the intended Deployment call.
    elif command == 'device':
        if not args.network:
            print('Missed arg: --network, for the name of the network')
            return 1
        d = Deployment()
        print(d.get_device_for_network_linux(network_name=args.network))
    elif command == 'prop':
        if not args.name:
            print('Missed arg: --name, for the name of the property to retrieve')
            return 1
        d = Deployment()
        print(d.get_value(property_name=args.name))
|
def db_migrate():
    """Migrate DB"""
    # Guard: this command is not finished yet; bail out before touching
    # anything. ``exit()`` raises SystemExit, so nothing below runs.
    print("Not ready for use")
    exit()
    # --- unreachable until the guard above is removed ---
    cwd_to_sys_path()
    alembic = _set_flask_alembic()
    with application.app.app_context():
        models = db.Model.__subclasses__()
        print(models)
        alembic.revision('making changes')  # auto-generate a migration
        alembic.upgrade()                   # upgrade the database
|
def tasks_runner(request):
    """A page that lets the admin run global tasks.

    GET renders the page with cache statistics; on POST, each recognized
    flag in ``request.POST`` triggers the corresponding maintenance task.
    Tasks run synchronously when ``settings.REGISTRY_SKIP_CELERY`` is set,
    otherwise they are queued on celery via ``.delay()``.
    """
    def _cached_count(key):
        # cache.get returns None on a miss; report 0 in that case.
        entries = cache.get(key)
        return len(entries) if entries else 0

    # server info
    cached_layers_number = _cached_count('layers')
    cached_deleted_layers_number = _cached_count('deleted_layers')

    # task actions
    if request.method == 'POST':
        def _run(task):
            if settings.REGISTRY_SKIP_CELERY:
                task()
            else:
                task.delay()

        def _drop_cached():
            cache.set('layers', None)
            cache.set('deleted_layers', None)

        # POST flag -> action, kept in the original evaluation order to
        # avoid duplicating the skip-celery branching five times.
        actions = (
            ('check_all', lambda: _run(check_all_services)),
            ('index_all', lambda: _run(index_all_layers)),
            ('index_cached', lambda: _run(index_cached_layers)),
            ('drop_cached', _drop_cached),
            ('clear_index', lambda: _run(clear_index)),
            ('remove_index', lambda: _run(unindex_layers_with_issues)),
        )
        for flag, action in actions:
            if flag in request.POST:
                action()
    return render(request, 'aggregator/tasks_runner.html', {'cached_layers_number': cached_layers_number, 'cached_deleted_layers_number': cached_deleted_layers_number, })
|
def ReadFromDirectory(self, artifacts_reader, path, extension='yaml'):
    """Reads artifact definitions into the registry from files in a directory.

    This function does not recurse into sub directories.

    Args:
      artifacts_reader (ArtifactsReader): an artifacts reader.
      path (str): path of the directory to read from.
      extension (Optional[str]): extension of the filenames to read.

    Raises:
      KeyError: if a duplicate artifact definition is encountered.
    """
    definitions = artifacts_reader.ReadDirectory(path, extension=extension)
    for definition in definitions:
        self.RegisterDefinition(definition)
|
def _send(key, value, metric_type):
    """Fire-and-forget a metric to the statsd daemon over UDP.

    No persistent socket connection is used; failures are logged and
    swallowed so metrics never break the caller.

    :param str key: metric key; ``STATSD_PREFIX`` is prepended when set
    :param str value: The properly formatted statsd counter value
    :param str metric_type: statsd metric type suffix
    """
    full_key = '.'.join([STATSD_PREFIX, key]) if STATSD_PREFIX else key
    payload = '{0}:{1}|{2}'.format(full_key, value, metric_type)
    try:
        STATSD_SOCKET.sendto(payload.encode(), STATSD_ADDR)
    except socket.error:
        # Metrics are best-effort: record the failure and carry on.
        LOGGER.exception(SOCKET_ERROR)
|
async def _RPC_handler(self, request: Dict[str, Any]):
    """Dispatch an RPC request, execute it, and write the response.

    On success, the behaviour depends on the registered target: for plain
    functions, instance methods and coroutines the computed result is sent
    back to the client; for async generator functions the resulting async
    generator is wrapped and streamed chunk by chunk. Known exceptions are
    converted into RPC error responses instead of propagating.

    Parameters:
        request (Dict[str, Any]): the request as a python dict

    Raise:
        (Exception): re-raised when the thrown exception is outside the
            defined hierarchy and ``self.debug`` is True

    Return:
        (bool): False when a known exception was answered; otherwise the
            call result is returned
    """
    ID = request.get("ID")
    method = request.get("METHOD")
    with_return = request.get("RETURN")
    # Missing ARGS/KWARGS default to empty.
    args = request.get("ARGS") or []
    kwargs = request.get("KWARGS") or {}
    try:
        if method is None:
            raise RequestError("request do not have method", request.get("ID"))
        if method == "system.getresult":
            # NOTE(review): ``result`` is not assigned on this branch, so the
            # ``with_return`` path below would raise NameError — confirm that
            # system.getresult requests never set RETURN.
            await self._get_result(ID, *args, **kwargs)
        else:
            result = await self.method_wrapper.apply(ID, method, *args, **kwargs)
    except MethodError as se:
        # Known method-level failure: answer with a structured error payload
        # including the formatted traceback.
        exinfo = traceback.TracebackException.from_exception(se).format(chain=True)
        # NOTE(review): "/n" below looks like a typo for "\n" — confirm.
        frames = "".join([i + "/n" for i in exinfo])
        response = {"MPRPC": self.VERSION, "CODE": se.status_code, "MESSAGE": {"ID": ID, 'EXCEPTION': str(type(se)), 'MESSAGE': str(se), "DATA": {'METHOD': request.get("METHOD"), "ARGS": request.get("ARGS"), "KWARGS": request.get("KWARGS"), 'FRAME': frames}}}
        self.writer(response)
        return False
    except ServerException as me:
        # Server-level failure: status code only, no detail payload.
        response = {"MPRPC": self.VERSION, "CODE": me.status_code, }
        self.writer(response)
        return False
    except Exception as e:
        # Unknown exception: re-raise in debug mode, otherwise just log it.
        if self.debug is True:
            raise e
        else:
            logger.info("Task[{}]: Unknown Error {}:\nmessage:{}".format(ID, e.__class__.__name__, str(e)))
    else:
        if with_return:
            if inspect.isasyncgen(result):
                # Stream async-generator results chunk by chunk.
                await self._asyncgen_wrap(result, ID)
            else:
                response = {"MPRPC": self.VERSION, "CODE": 200, "MESSAGE": {"ID": ID, 'RESULT': result}}
                self.writer(response)
        if self.debug:
            access_logger.info("Task[{}]: response answered".format(ID), extra=self._extra)
        return result
|
def _onMouseWheel(self, evt):
    """Translate mouse wheel events into matplotlib events."""
    # Determine mouse location (mpl's y axis is flipped relative to wx's).
    x = evt.GetX()
    y = self.figure.bbox.height - evt.GetY()
    # Convert delta/rotation/rate into a floating point step size
    delta = evt.GetWheelDelta()
    rotation = evt.GetWheelRotation()
    rate = evt.GetLinesPerAction()
    # print "delta, rotation, rate", delta, rotation, rate
    step = rate * float(rotation) / delta
    # Done handling event
    evt.Skip()
    # Mac is giving two events for every wheel event
    # Need to skip every second one
    if wx.Platform == '__WXMAC__':
        if not hasattr(self, '_skipwheelevent'):
            # First wheel event ever seen: initialize the toggle and fall
            # through to deliver this event.
            self._skipwheelevent = True
        elif self._skipwheelevent:
            self._skipwheelevent = False
            # Return without processing event
            return
        else:
            self._skipwheelevent = True
    # Convert to mpl event
    FigureCanvasBase.scroll_event(self, x, y, step, guiEvent=evt)
|
def _batch_arguments(self):
    """Register CLI arguments specific to Batch API writes.

    --batch_action action             Action for the batch job ['Create', 'Delete'].
    --batch_chunk number              The maximum number of indicators per batch job.
    --batch_halt_on_error             Flag to indicate that the batch job should halt on error.
    --batch_poll_interval seconds     Seconds between batch status polls.
    --batch_poll_interval_max seconds Seconds before app should time out waiting
                                      on batch job completion.
    --batch_write_type type           Write type for Indicator attributes
                                      ['Append', 'Replace'].
    """
    self.add_argument(
        '--batch_action',
        choices=['Create', 'Delete'],
        default=self._batch_action,
        help='Action for the batch job',
    )
    self.add_argument(
        '--batch_chunk',
        default=self._batch_chunk,
        help='Max number of indicators per batch',
        type=int,
    )
    self.add_argument(
        '--batch_halt_on_error',
        action='store_true',
        default=self._batch_halt_on_error,
        help='Halt batch job on error',
    )
    self.add_argument(
        '--batch_poll_interval',
        default=self._batch_poll_interval,
        help='Frequency to run status check for batch job.',
        type=int,
    )
    self.add_argument(
        '--batch_poll_interval_max',
        default=self._batch_poll_interval_max,
        help='Maximum amount of time for status check on batch job.',
        type=int,
    )
    self.add_argument(
        '--batch_write_type',
        choices=['Append', 'Replace'],
        default=self._batch_write_type,
        help='Append or Replace attributes.',
    )
|
def _downsample_array ( col : np . array , target : int , random_state : int = 0 , replace : bool = True , inplace : bool = False ) :
"""Evenly reduce counts in cell to target amount .
This is an internal function and has some restrictions :
* ` dtype ` of col must be an integer ( i . e . satisfy issubclass ( col . dtype . type , np . integer ) )
* total counts in cell must be less than target"""
|
np . random . seed ( random_state )
cumcounts = col . cumsum ( )
if inplace :
col [ : ] = 0
else :
col = np . zeros_like ( col )
total = cumcounts [ - 1 ]
sample = np . random . choice ( total , target , replace = replace )
sample . sort ( )
geneptr = 0
for count in sample :
while count >= cumcounts [ geneptr ] :
geneptr += 1
col [ geneptr ] += 1
return col
|
def get_vm_status(self, device='FLOPPY'):
    """Return the virtual media drive status for ``device``.

    The device name is upper-cased before being sent to the RIB_INFO
    GET_VM_STATUS command.
    """
    payload = {'DEVICE': device.upper()}
    response = self._execute_command('GET_VM_STATUS', 'RIB_INFO', 'read', payload)
    return response['GET_VM_STATUS']
|
def _str2array ( d ) :
"""Reconstructs a numpy array from a plain - text string"""
|
if type ( d ) == list :
return np . asarray ( [ _str2array ( s ) for s in d ] )
ins = StringIO ( d )
return np . loadtxt ( ins )
|
def calculate_traditional_regressor_output_shapes(operator):
    '''Allowed input/output patterns are
    1. [N, C] ---> [N, C']

    The number C' is the length of the prediction vector. It can be a
    scalar (C' = 1) or a vector (C' > 1).
    '''
    check_input_and_output_types(
        operator,
        good_input_types=[FloatTensorType, Int64TensorType, FloatType, Int64Type])
    for variable in operator.inputs:
        if len(variable.type.shape) != 2:
            raise RuntimeError('Input(s) must be 2-D tensor(s)')
    model_type = operator.raw_operator.WhichOneof('Type')
    # Derive C' (prediction vector length) from the concrete model type.
    if model_type == 'glmRegressor':
        C = len(operator.raw_operator.glmRegressor.weights)
    elif model_type == 'treeEnsembleRegressor':
        ensemble = operator.raw_operator.treeEnsembleRegressor.treeEnsemble
        C = len(ensemble.basePredictionValue)
    elif model_type == 'supportVectorRegressor':
        C = 1
    else:
        raise ValueError('Model should be one of linear model, tree-based model, and support vector machine')
    N = operator.inputs[0].type.shape[0]
    operator.outputs[0].type = FloatTensorType([N, C], doc_string=operator.outputs[0].type.doc_string)
|
def inverse_gaussian_gradient(image, alpha=100.0, sigma=5.0):
    """Inverse of the gradient magnitude.

    Computes the Gaussian-smoothed gradient magnitude of ``image`` and
    inverts it into the range (0, 1]: flat areas map to values close to 1,
    areas near borders to values close to 0. Intended as a preprocessing
    step before ``morphological_geodesic_active_contour``.

    Parameters
    ----------
    image : (M, N) or (L, M, N) array
        Grayscale image or volume.
    alpha : float, optional
        Steepness of the inversion; larger values sharpen the transition
        between flat and border areas.
    sigma : float, optional
        Standard deviation of the Gaussian filter applied over the image.

    Returns
    -------
    gimage : (M, N) or (L, M, N) array
        Preprocessed image (or volume).
    """
    # 'nearest' padding avoids spurious gradients at the array borders.
    magnitude = ndi.gaussian_gradient_magnitude(image, sigma, mode='nearest')
    return 1.0 / np.sqrt(1.0 + alpha * magnitude)
|
def remove_analysis_from_worksheet(analysis):
    """Remove ``analysis`` from the worksheet it is assigned to, if any.

    After removal the worksheet state is nudged forward: if other analyses
    remain, submit/verify transitions are attempted so the worksheet state
    matches its contents; if none remain it is rolled back to "open".
    Finally the worksheet is queued for reindexing.
    """
    worksheet = analysis.getWorksheet()
    if not worksheet:
        return
    # Bug fix: ``filter(...)`` returns a lazy, always-truthy iterator on
    # Python 3, which would break both setAnalyses and the emptiness check
    # below; build a real list instead.
    analyses = [an for an in worksheet.getAnalyses() if an != analysis]
    worksheet.setAnalyses(analyses)
    worksheet.purgeLayout()
    if analyses:
        # Maybe this analysis was the only one that was not yet submitted or
        # verified, so try to submit or verify the Worksheet to be aligned
        # with the current states of the analyses it contains.
        doActionFor(worksheet, "submit")
        doActionFor(worksheet, "verify")
    else:
        # We've removed all analyses. Rollback to "open"
        doActionFor(worksheet, "rollback_to_open")
    # Reindex the Worksheet
    idxs = ["getAnalysesUIDs"]
    push_reindex_to_actions_pool(worksheet, idxs=idxs)
|
def decrypt(self, password_encrypted):
    """Decrypt the password.

    Returns ``b''`` for an empty input; returns the input unchanged when no
    crypter is configured.
    """
    if password_encrypted and self._crypter:
        return self._crypter.decrypt(password_encrypted)
    return password_encrypted or b''
|
def upload_data(self, file_or_str, chunk_size=analyzere.upload_chunk_size, poll_interval=analyzere.upload_poll_interval, upload_callback=lambda x: None, commit_callback=lambda x: None):
    """Accepts a file-like object or string and uploads it.

    Files are automatically uploaded in chunks. The default chunk size is
    16MiB and can be overwritten by specifying the number of bytes in the
    ``chunk_size`` variable.

    Accepts an optional poll_interval for temporarily overriding the
    default value ``analyzere.upload_poll_interval``.

    Implements the tus protocol.

    Takes optional callbacks that return the percentage complete for the
    given "phase" of upload: upload/commit. Callback values are returned
    as 10.0 for 10%. Blocks until the server reports processing success
    or failure, then returns the final status resource.
    """
    if not callable(upload_callback):
        raise Exception('provided upload_callback is not callable')
    if not callable(commit_callback):
        raise Exception('provided commit_callback is not callable')
    # Wrap plain strings so the chunked reader below sees a file object.
    file_obj = StringIO(file_or_str) if isinstance(file_or_str, six.string_types) else file_or_str
    # Upload file with known entity size if file object supports random
    # access.
    length = None
    if hasattr(file_obj, 'seek'):
        length = utils.file_length(file_obj)
        # Initiate upload session
        request_raw('post', self._data_path, headers={'Entity-Length': str(length)})
    else:
        request_raw('post', self._data_path)
    # Upload chunks
    for chunk, offset in utils.read_in_chunks(file_obj, chunk_size):
        headers = {'Offset': str(offset), 'Content-Type': 'application/offset+octet-stream'}
        request_raw('patch', self._data_path, headers=headers, body=chunk)
        # if there is a known size, and an upload callback, call it
        if length:
            upload_callback(offset * 100.0 / length)
    upload_callback(100.0)
    # Commit the session
    request_raw('post', self._commit_path)
    # Block until data has finished processing
    while True:
        resp = self.upload_status
        if (resp.status == 'Processing Successful' or resp.status == 'Processing Failed'):
            commit_callback(100.0)
            return resp
        else:
            commit_callback(float(resp.commit_progress))
            time.sleep(poll_interval)
|
def max_in_column(tuple_list: list, index: int) -> int:
    """Determine the maximum value in a specific column of a list of tuples.

    Args:
        tuple_list: a list of tuples assumed to have at least ``index + 1``
            elements each
        index: the 0-based index of the column to find the maximum value in

    Returns:
        The highest value present at the ``index`` position across all
        tuples in ``tuple_list``.

    Example:
        >>> max_in_column([(5, 6, 7), (1, 3, 5), (8, 9, 19)], 2)
        19
        >>> max_in_column([(6, 7, 8), (2, 4, 6), (9, 10, 20)], 1)
        10
        >>> max_in_column([(7, 8, 9), (3, 5, 7), (10, 11, 21)], 1)
        11
    """
    column_values = (row[index] for row in tuple_list)
    return max(column_values)
|
def File(name, mode='a', chunk_cache_mem_size=1024 ** 2, w0=0.75, n_cache_chunks=None, **kwds):
    """Create an h5py File object with an explicit chunk-cache specification.

    This function is basically just a wrapper around the usual h5py.File
    constructor, but accepts additional keywords:

    Parameters
    ----------
    name : str
    mode : str
    **kwds : dict (as keywords)
        Standard h5py.File arguments, passed to its constructor.
    chunk_cache_mem_size : int
        Number of bytes to use for the chunk cache. Defaults to 1024**2
        (1MB), which is also the default for h5py.File — though it cannot
        be changed through the standard interface.
    w0 : float between 0.0 and 1.0
        Eviction parameter. Defaults to 0.75. "If the application will
        access the same data more than once, w0 should be set closer to 0,
        and if the application does not, w0 should be set closer to 1."
        --- <https://www.hdfgroup.org/HDF5/doc/Advanced/Chunking/>
    n_cache_chunks : int
        Number of chunks to be kept in cache at a time. Defaults to the
        (smallest integer greater than) the square root of the number of
        elements that can fit into memory. This is just used for the number
        of slots (nslots) maintained in the cache metadata, so it can be
        set larger than needed with little cost.
    """
    import sys
    import numpy as np
    import h5py
    name = name.encode(sys.getfilesystemencoding())
    open(name, mode).close()
    # Just make sure the file exists
    if mode in [m + b for m in ['w', 'w+', 'r+', 'a', 'a+'] for b in ['', 'b']]:
        mode = h5py.h5f.ACC_RDWR
    else:
        mode = h5py.h5f.ACC_RDONLY
    if 'dtype' in kwds:
        bytes_per_object = np.dtype(kwds['dtype']).itemsize
    else:
        # Assume float64 as the most likely payload. Bug fix: ``np.float``
        # was removed in NumPy 1.24; the builtin ``float`` maps to the
        # same dtype.
        bytes_per_object = np.dtype(float).itemsize
    if not n_cache_chunks:
        n_cache_chunks = int(np.ceil(np.sqrt(chunk_cache_mem_size / bytes_per_object)))
    # A prime slot count reduces hash collisions in the chunk cache.
    nslots = _find_next_prime(100 * n_cache_chunks)
    propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
    settings = list(propfaid.get_cache())
    settings[1:] = (nslots, chunk_cache_mem_size, w0)
    propfaid.set_cache(*settings)
    return h5py.File(h5py.h5f.open(name, flags=mode, fapl=propfaid), **kwds)
|
def intialize(self):
    """Initialize the serial port and verify the device firmware.

    Opens the connection via ``self._connect()`` and sets
    ``self.deviceConnected`` according to whether ``self.UIStatusMsg``
    contains the expected firmware prefix.
    """
    # NOTE(review): "intialize" is a typo for "initialize"; renaming would
    # break external callers, so it is kept as-is.
    print '%s call intialize' % self.port
    try:
        self.deviceConnected = False
        # init serial port
        self._connect()
        if self.firmwarePrefix in self.UIStatusMsg:
            self.deviceConnected = True
        else:
            self.UIStatusMsg = "Firmware Not Matching Expecting " + self.firmwarePrefix + " Now is " + self.UIStatusMsg
            ModuleHelper.WriteIntoDebugLogger("Err: OpenThread device Firmware not matching..")
    # Python 2 exception syntax; any failure leaves the device flagged as
    # disconnected.
    except Exception, e:
        ModuleHelper.WriteIntoDebugLogger("intialize() Error: " + str(e))
        self.deviceConnected = False
|
def remove_small_ns(taus, devs, deverrs, ns):
    """Remove results computed from too few samples.

    Points where n is small (== 1) are rejected.

    Parameters
    ----------
    taus : array
        List of tau values for which deviations were computed.
    devs : array
        List of deviations.
    deverrs : array or list of arrays
        List of estimated errors (possibly a list containing two arrays:
        upper and lower values).
    ns : array
        Number of samples for each point.

    Returns
    -------
    (taus, devs, deverrs, ns) : tuple
        Identical to input, except that values with low ns are removed.
    """
    keep = ns > 1
    kept_taus = taus[keep]
    kept_devs = devs[keep]
    kept_ns = ns[keep]
    if isinstance(deverrs, list):
        # Asymmetric errors arrive as [lower, upper]; filter each array.
        assert len(deverrs) < 3
        kept_errs = [deverrs[0][keep], deverrs[1][keep]]
    else:
        kept_errs = deverrs[keep]
    if len(kept_devs) == 0:
        print("remove_small_ns() nothing remains!?")
        raise UserWarning
    return kept_taus, kept_devs, kept_errs, kept_ns
|
def parse(self, buffer, inlineparent=None):
    '''Compatible to Parser.parse().

    Parses exactly ``self.size`` consecutive elements from ``buffer`` with
    the inner parser; returns ``(values, bytes_consumed)`` or ``None`` when
    the buffer runs out before all elements are parsed.
    '''
    consumed = 0
    items = []
    for _ in range(self.size):
        parsed = self.innerparser.parse(buffer[consumed:], None)
        if parsed is None:
            # Not enough data for the next element: propagate failure.
            return None
        items.append(parsed[0])
        consumed += parsed[1]
    return (items, consumed)
|
def surface(self, param):
    """Return the detector surface point corresponding to ``param``.

    For parameter value ``p``, the surface point is given by::

        surf = p * axis

    Parameters
    ----------
    param : float or `array-like`
        Parameter value(s) at which to evaluate.

    Returns
    -------
    point : `numpy.ndarray`
        Vector(s) pointing from the origin to the detector surface point
        at ``param``. If ``param`` is a single parameter, the returned
        array has shape ``(2,)``, otherwise ``param.shape + (2,)``.

    Examples
    --------
    >>> part = odl.uniform_partition(0, 1, 10)
    >>> det = Flat1dDetector(part, axis=[1, 0])
    >>> det.surface(0)
    array([ 0.,  0.])
    >>> det.surface(1)
    array([ 1.,  0.])
    >>> det.surface([0, 1])
    array([[ 0.,  0.],
           [ 1.,  0.]])
    >>> det.surface(np.zeros((4, 5))).shape
    (4, 5, 2)
    """
    squeeze_out = (np.shape(param) == ())
    # Bug fix: ``np.array(..., copy=False)`` raises in NumPy >= 2.0 whenever
    # a copy is actually required; ``asarray`` + ``atleast_1d`` is the
    # portable equivalent of the old dtype/ndmin coercion.
    param = np.atleast_1d(np.asarray(param, dtype=float))
    if self.check_bounds and not is_inside_bounds(param, self.params):
        raise ValueError('`param` {} not in the valid range '
                         '{}'.format(param, self.params))
    # Create outer product of `params` and `axis`, resulting in shape
    # params.shape + axis.shape
    surf = np.multiply.outer(param, self.axis)
    if squeeze_out:
        surf = surf.squeeze()
    return surf
|
def history(directory=None, rev_range=None, verbose=False, indicate_current=False):
    """List changeset scripts in chronological order."""
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    # Older alembic releases do not accept the newer keyword arguments.
    if alembic_version < (0, 7, 0):
        command.history(config, rev_range)
    elif alembic_version < (0, 9, 9):
        command.history(config, rev_range, verbose=verbose)
    else:
        command.history(config, rev_range, verbose=verbose,
                        indicate_current=indicate_current)
|
async def wait_for_connection_lost(self) -> bool:
    """Block until the TCP connection closes or ``self.close_timeout`` expires.

    Return ``True`` when the connection is known to be closed and
    ``False`` if the timeout elapsed first.
    """
    if self.connection_lost_waiter.done():
        return True
    try:
        await asyncio.wait_for(
            asyncio.shield(self.connection_lost_waiter),
            self.close_timeout,
            loop=self.loop,
        )
    except asyncio.TimeoutError:
        pass
    # connection_lost() may have fired between the timeout and this point,
    # so consult the waiter again instead of trusting the timeout outcome.
    return self.connection_lost_waiter.done()
|
def submit_background_work_chain(self, work_chain, parent_workunit_name=None):
    """:API: public

    Submit ``work_chain`` to the background worker pool, parented either
    directly under the background root workunit or under a newly opened
    named child workunit.
    """
    background_root_workunit = self.run_tracker.get_background_root_workunit()
    if not parent_workunit_name:
        # No named parent requested: attach directly under the root.
        workunit_parent = background_root_workunit
        done_hook = None
    else:
        # The workunit must outlive this call (its child work finishes
        # later), so we drive the context manager by hand instead of
        # using a `with` block. Slightly funky, but the with-context
        # usage is too pervasive elsewhere to change its protocol.
        workunit_parent_ctx = self.run_tracker.new_workunit_under_parent(
            name=parent_workunit_name,
            labels=[WorkUnitLabel.MULTITOOL],
            parent=background_root_workunit)
        workunit_parent = workunit_parent_ctx.__enter__()
        done_hook = lambda: workunit_parent_ctx.__exit__(None, None, None)
    self.run_tracker.background_worker_pool().submit_async_work_chain(
        work_chain, workunit_parent=workunit_parent, done_hook=done_hook)
|
def add_tot_length(self, qname, sname, value, sym=True):
    """Record a total alignment length for the (qname, sname) pair.

    When ``sym`` is true the value is mirrored to (sname, qname) as
    well, keeping ``self.alignment_lengths`` symmetric.
    """
    cells = [(qname, sname)]
    if sym:
        cells.append((sname, qname))
    for row, col in cells:
        self.alignment_lengths.loc[row, col] = value
|
async def read(self, *, decode: bool = False) -> Any:
    """Read the entire body part payload.

    decode: when true, run the accumulated bytes through ``self.decode``
        (applying the encoding from the Content-Encoding header);
        otherwise the raw data is returned untouched.
    """
    if self._at_eof:
        return b''
    buf = bytearray()
    while not self._at_eof:
        chunk = await self.read_chunk(self.chunk_size)
        buf.extend(chunk)
    return self.decode(buf) if decode else buf
|
def add_group(self, group_id, name, desc):
    '''Create a new parameter group and register it under name and ID.

    Parameters
    group_id : int
        The numeric ID for the new group.
    name : str
        Name to assign to the group (stored upper-cased).
    desc : str
        Description to assign to the group.
    Returns
    group : :class:`Group`
        The newly created group, reachable via both its name and its ID.
    Raises
    KeyError
        If a group with the same ID or (upper-cased) name already exists.
    '''
    if group_id in self.groups:
        raise KeyError(group_id)
    name = name.upper()
    if name in self.groups:
        raise KeyError(name)
    group = Group(name, desc)
    # Register under both keys so lookups by ID and by name both work.
    self.groups[name] = group
    self.groups[group_id] = group
    return group
|
def cmd_ip_internal(verbose):
    """Print the local IP address(es) of the local interfaces as JSON.

    The output maps each interface name (e.g. "lo") to its "ipv4",
    "ipv6" and "link_layer" address details, as returned by
    ``get_internal_ip``.
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        print("Gathering NIC details...", file=sys.stderr)
    result = get_internal_ip()
    if not result:
        print("[X] Unable to get detail about the interfaces")
    else:
        print(json.dumps(result, indent=4))
    return True
|
def publish_request(self, subject, reply, payload):
    """Publish ``payload`` on ``subject`` tagged with a ``reply`` inbox
    subscription that receivers can use to respond:

    ->> PUB hello _INBOX.2007314fe0fcb2cdc2a2914c1 5
    ->> MSG_PAYLOAD: world
    <<- MSG hello 2 _INBOX.2007314fe0fcb2cdc2a2914c1 5
    """
    size = len(payload)
    # Reject messages the server would refuse anyway.
    if size > self._max_payload_size:
        raise ErrMaxPayload
    if self.is_closed:
        raise ErrConnectionClosed
    yield self._publish(subject, reply, payload, size)
    if self._flush_queue.empty():
        yield self._flush_pending()
|
def loadUi(modulefile, inst, uifile=None, theme='default', className=None):
    """Load the ui file based on the module file location and the inputed class.

    Tries a pre-compiled ``Ui_<ClassName>`` python module first (when
    ``USE_COMPILED`` is set), falling back to dynamic generation with
    ``uic.loadUi`` from the .ui file on disk.

    :param modulefile | <str> file path of the module requesting the ui
           inst       | <subclass of QWidget> instance to populate
           uifile     | <str> || None explicit .ui file path override
           theme      | <str> theme name used in the compiled module lookup
           className  | <str> || None class-name override (defaults to
                        the class name of ``inst``)
    :return <QWidget>
    """
    if className is None:
        className = inst.__class__.__name__
    # Bring the active Qt wrapper's modules (QtCore, QtGui, uic, ...)
    # into this module's globals.
    import_qt(globals())
    currpath = QtCore.QDir.currentPath()
    # use compiled information vs. dynamic generation
    widget = None
    if USE_COMPILED:
        # find the root module
        def find_root_module(cls, name):
            # Walk the QWidget base classes looking for the class that
            # actually declares `name`; return its package path, or None
            # implicitly if no base matches.
            if cls.__name__ == name:
                return cls.__module__.rpartition('.')[0]
            else:
                for base in cls.__bases__:
                    if not issubclass(base, QtGui.QWidget):
                        continue
                    out = find_root_module(base, name)
                    if out:
                        return out
        wrapper = QT_WRAPPER.lower()
        root_module = find_root_module(inst.__class__, className)
        if not root_module:
            root_module = inst.__module__.rpartition('.')[0]
        basename = className.lower()
        # Candidate compiled-ui module names, most specific first:
        # theme+wrapper, wrapper only, theme only, plain.
        modname_a = '{0}.ui.{1}_{2}_{3}_ui'.format(root_module, basename, theme, wrapper)
        modname_b = '{0}.ui.{1}_{2}_ui'.format(root_module, basename, wrapper)
        modname_c = '{0}.ui.{1}_{2}_ui'.format(root_module, basename, theme)
        modname_d = '{0}.ui.{1}_ui'.format(root_module, basename)
        module = None
        for modname in (modname_a, modname_b, modname_c, modname_d):
            modname = modname.strip('.')
            logger.debug('Loading module: {0}...'.format(modname))
            try:
                __import__(modname)
                module = sys.modules[modname]
                break
            except StandardError:  # NOTE(review): Python 2 only -- StandardError does not exist on Python 3
                pass
        # successfully loaded a module
        if module:
            # load the module information
            cls = getattr(module, 'Ui_%s' % className, None)
            if not cls:
                # Fall back to the first Ui_* class the module exports.
                for key in module.__dict__.keys():
                    if key.startswith('Ui_'):
                        cls = getattr(module, key)
                        break
            # generate the class information
            if cls:
                widget = cls()
                widget.setupUi(inst)
                # Expose the generated child-widget attributes on `inst`.
                inst.__dict__.update(widget.__dict__)
    if not widget:
        if not uifile:
            uifile = uiFile(modulefile, inst, theme, className=className)
        # normalize the path
        uifile = os.path.normpath(uifile)
        if os.path.exists(uifile):
            # Run uic.loadUi with the .ui file's directory current so
            # relative resource paths resolve; restore the cwd afterwards.
            QtCore.QDir.setCurrent(os.path.dirname(uifile))
            widget = uic.loadUi(uifile, inst)
            QtCore.QDir.setCurrent(currpath)
    inst.addActions(findUiActions(inst))
    return widget
|
def calculate_size(name, overflow_policy, value):
    """Calculates the request payload size.

    Sum of the encoded name, one int-sized field (presumably the
    overflow-policy enum -- confirm against the protocol codec), and the
    encoded value blob.
    """
    return (calculate_size_str(name)
            + INT_SIZE_IN_BYTES
            + calculate_size_data(value))
|
def random(self, length=22):
    """Generate and return a cryptographically-secure short random string
    of the specified length.
    """
    # `length` bytes of OS entropy give more than enough randomness for
    # `length` output characters; encode then trim.
    raw = os.urandom(length)
    entropy = int(binascii.b2a_hex(raw), 16)
    encoded = self._num_to_string(entropy, pad_to_length=length)
    return encoded[:length]
|
def update_dataset(self, dataset_id, friendly_name=None, description=None, access=None, project_id=None):
    """Update an existing dataset, replacing the entire dataset resource.

    (The patch method, by contrast, only replaces fields that are
    provided in the submitted dataset resource.)

    Parameters
    dataset_id : str
        Unique ``str`` identifying the dataset within the project.
    friendly_name : str, optional
        An optional descriptive name for the dataset.
    description : str, optional
        An optional description of the dataset.
    access : list, optional
        Access permission entries.
    project_id : str, optional
        Unique ``str`` identifying the BigQuery project containing the
        dataset.
    Returns
    Union[bool, dict]
        ``bool`` success flag, or the raw BigQuery response when
        ``self.swallow_results`` is False.
    """
    project_id = self._get_project_id(project_id)
    try:
        body = self.dataset_resource(dataset_id,
                                     friendly_name=friendly_name,
                                     description=description,
                                     access=access,
                                     project_id=project_id)
        response = self.bigquery.datasets().update(
            projectId=project_id,
            datasetId=dataset_id,
            body=body).execute(num_retries=self.num_retries)
    except HttpError as e:
        logger.error('Cannot update dataset {0}: {1}'.format(dataset_id, e))
        return False if self.swallow_results else {}
    return True if self.swallow_results else response
|
def get_days_since_last_modified(filename):
    """Return the whole number of days since *filename* was last modified.

    :param filename: Absolute file path
    :return: Number of days (int) since the file's mtime.
    """
    mtime = datetime.fromtimestamp(os.path.getmtime(filename))
    age = datetime.now() - mtime
    return age.days
|
def squawk(self) -> Set[str]:
    """Return all distinct squawk codes seen along the trajectory."""
    # Forward- then back-fill gaps before collapsing to unique values.
    filled = self.data.squawk.ffill().bfill()
    return set(filled)
|
def key_map(f, m, *args, **kwargs):
    '''key_map(f, m) is equivalent to {f(k): v for (k, v) in m.items()} except
    that it returns a persistent mapping object instead of a dict. It also
    respects the laziness of maps: values of a lazy map passed in are never
    evaluated here.
    key_map(f, m, *args, **kwargs) uses f(k, *args, **kwargs) instead of f(k).
    '''
    if not is_lazy_map(m):
        return ps.pmap({f(k, *args, **kwargs): v for (k, v) in six.iteritems(m)})
    from .table import (is_itable, itable)
    # Wrap each value lookup in a thunk so the lazy map is never realized
    # here; `k` must be bound per-thunk, hence the default-argument capture.
    lazy_vals = {f(k, *args, **kwargs): (lambda kk=k: m[kk])
                 for k in six.iterkeys(m)}
    return itable(lazy_vals) if is_itable(m) else lazy_map(lazy_vals)
|
def value_is_int(self):
    """Return True if ``self.value`` is text representing an integral number.

    Accepts anything ``float`` accepts and then checks the parsed value
    is whole, so '3' and '3.0' count as ints while '3.5', 'abc', 'nan'
    and 'inf' do not.
    """
    try:
        parsed = float(self.value)
        truncated = int(parsed)
    except (ValueError, OverflowError):
        # ValueError: non-numeric text, or int(nan).
        # OverflowError: int('inf'/'-inf') -- previously escaped uncaught.
        return False
    else:
        return parsed == truncated
|
def _work(self):
    """Process the information regarding the available ports."""
    if self._refresh_cache:
        # Inconsistent cache might cause exceptions. For example,
        # if a port has been removed, it will be known in the next
        # loop. Using the old switch port can cause exceptions.
        LOG.debug("Refreshing os_win caches...")
        self._utils.update_cache()
        self._refresh_cache = False
    if self._bound_ports or self._unbound_ports:
        # Notify the plugin about bound/unbound ports in a green thread
        # so this agent loop is not blocked by the notification.
        eventlet.spawn_n(self._notify_plugin_on_port_updates)
    # notify plugin about port deltas
    if self._added_ports:
        LOG.debug("Agent loop has new devices!")
        self._treat_devices_added()
    if self._removed_ports:
        LOG.debug("Agent loop has lost devices...")
        self._treat_devices_removed()
|
def acquire(self, blocking=True, timeout=None):
    """Attempt to acquire this lock.

    If "blocking" is True and "timeout" is None, block until the lock is
    acquired. If "blocking" is False, return immediately when the lock
    cannot be taken. Otherwise block for at most "timeout" seconds.

    Returns True if the lock was successfully acquired, False otherwise.
    """
    if timeout is None:
        return self.__lock.acquire(blocking)
    # Emulate a timed acquire by polling with exponentially growing
    # sleeps -- the same scheme the stdlib Condition class uses. Under
    # heavy contention you may not get the lock, but Python does not
    # guarantee fairness anyway; platform-specific extensions can
    # provide a better mechanism.
    deadline = _time() + timeout
    delay = 0.0005
    while True:
        if self.__lock.acquire(False):
            return True
        remaining = deadline - _time()
        if remaining <= 0:
            return False
        delay = min(delay * 2, remaining, 0.05)
        _sleep(delay)
|
def from_dict(cls, data):
    """Build an AuthenticationInformation object from its dict form.

    :type data: dict[str, str]
    :rtype: satosa.internal.AuthenticationInformation
    :param data: A dict representation of an AuthenticationInformation object
    :return: An AuthenticationInformation object
    """
    fields = ("auth_class_ref", "timestamp", "issuer")
    return cls(**{name: data.get(name) for name in fields})
|
def elbow_method(data, k_min, k_max, distance='euclidean'):
    """Plot percentage of variance explained against number of clusters.

    Implementation reference: https://github.com/sarguido/k-means-clustering.rst
    :param data: The dataset
    :param k_min: lowerbound of the cluster range
    :param k_max: upperbound of the cluster range
    :param distance: the distance metric, 'euclidean' by default
    :return:
    """
    ks = range(k_min, k_max)
    # One fitted k-means model per candidate cluster count.
    models = [Clustering.kmeans(k).fit(data) for k in ks]
    centers = [fitted.model.cluster_centers_ for fitted in models]
    # Distance from every point to its nearest centroid, per model.
    nearest = [np.min(cdist(data, c, distance), axis=1) for c in centers]
    # Within-cluster sum of squares for each k.
    wcss = [sum(d ** 2) for d in nearest]
    # Total sum of squares over the whole dataset.
    tss = sum(pdist(data) ** 2) / data.shape[0]
    # Between-cluster sum of squares.
    bss = tss - wcss
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(ks, bss / tss * 100, 'b*-')
    ax.set_ylim((0, 100))
    plt.grid(True)
    plt.xlabel('n_clusters')
    plt.ylabel('Percentage of variance explained')
    plt.title('Variance Explained vs. k')
    plt.show()
|
def flatten(obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes, do_flatten=do_flatten):
    """Flatten a sequence to a non-nested list.

    Converts either a single scalar or a nested sequence into a flat
    list. Strings are treated as scalars, not as sequences the way
    Python normally would.
    """
    def is_scalar(x):
        return isinstance(x, StringTypes) or not isinstance(x, SequenceTypes)
    if is_scalar(obj):
        return [obj]
    result = []
    for element in obj:
        if is_scalar(element):
            result.append(element)
        else:
            do_flatten(element, result)
    return result
|
def gen_search_article_url(keyword, page=1,
                           timesn=WechatSogouConst.search_article_time.anytime,
                           article_type=WechatSogouConst.search_article_type.all,
                           ft=None, et=None):
    """Build the Sogou Weixin article-search URL.

    Parameters
    keyword : str or unicode
        Text to search for.
    page : int, optional
        Result page number, default 1.
    timesn : WechatSogouConst.search_article_time
        Time filter: anytime (no limit) / day / week / month / year /
        specific (custom range). Default is anytime.
    article_type : WechatSogouConst.search_article_type
        Content filter: image / video / rich (both) / all. Default all.
    ft, et : datetime.date
        Start and end dates, required when ``timesn`` is ``specific``
        (e.g. 2017-07-01 .. 2017-07-15).

    Returns
    str
        search_article_url
    """
    assert isinstance(page, int) and page > 0
    valid_times = [
        WechatSogouConst.search_article_time.anytime,
        WechatSogouConst.search_article_time.day,
        WechatSogouConst.search_article_time.week,
        WechatSogouConst.search_article_time.month,
        WechatSogouConst.search_article_time.year,
        WechatSogouConst.search_article_time.specific,
    ]
    assert timesn in valid_times
    if timesn == WechatSogouConst.search_article_time.specific:
        assert isinstance(ft, datetime.date)
        assert isinstance(et, datetime.date)
        assert ft <= et
    else:
        ft = ''
        et = ''
    interation_image = 458754
    interation_video = 458756
    if article_type == WechatSogouConst.search_article_type.rich:
        interation = '{},{}'.format(interation_image, interation_video)
    elif article_type == WechatSogouConst.search_article_type.image:
        interation = interation_image
    elif article_type == WechatSogouConst.search_article_type.video:
        interation = interation_video
    else:
        interation = ''
    # Parameter order matters for the generated URL, hence OrderedDict.
    qs_dict = OrderedDict((
        ('type', _search_type_article),
        ('page', page),
        ('ie', 'utf8'),
        ('query', keyword),
        ('interation', interation),
    ))
    if timesn != 0:
        qs_dict['tsn'] = timesn
        qs_dict['ft'] = str(ft)
        qs_dict['et'] = str(et)
    # TODO: in-account search, e.g.
    # http://weixin.sogou.com/weixin?type=2&ie=utf8&query=...&tsn=3&ft=&et=&interation=458754
    #   &wxid=oIWsFt1tmWoG6vO6BcsS7St61bRE&usip=nanhangqinggong
    # would additionally need qs_dict['wxid'] and qs_dict['usip'].
    return 'http://weixin.sogou.com/weixin?{}'.format(urlencode(qs_dict))
|
def main(argv):
    """Entry point for command line script to perform OAuth 2.0.

    :param argv: command-line arguments (excluding the program name).
    :return: 0 on success, 1 on invalid arguments.
    """
    p = argparse.ArgumentParser()
    p.add_argument('-s', '--scope', nargs='+')
    p.add_argument('-o', '--oauth-service', default='google')
    p.add_argument('-i', '--client-id')
    p.add_argument('-x', '--client-secret')
    p.add_argument('-r', '--redirect-uri')
    p.add_argument('-f', '--client-secrets')
    args = p.parse_args(argv)
    # BUG FIX: the tuple previously listed args.client_id twice and
    # omitted args.redirect_uri, so the all-or-nothing check never
    # actually considered --redirect-uri.
    client_args = (args.client_id, args.client_secret, args.redirect_uri)
    if any(client_args) and not all(client_args):
        print('Must provide none of client-id, client-secret and redirect-uri;'
              ' or all of them.')
        p.print_usage()
        return 1
    # (Removed a leftover Python-2 `print args.scope` debug statement,
    # which is a syntax error under Python 3.)
    if not args.scope:
        print('Scope must be provided.')
        p.print_usage()
        return 1
    config = WizardClientConfig()
    config.scope = ' '.join(args.scope)
    print(run_local(UserOAuth2(config))['access_token'])
    return 0
|
def get_fd_qnm(template=None, **kwargs):
    """Return a frequency domain damped sinusoid.

    Parameters
    template : object
        An object that has attached properties. This can be used to
        substitute for keyword arguments. A common example would be a row
        in an xml table.
    f_0 : float
        The ringdown-frequency.
    tau : float
        The damping time of the sinusoid.
    amp : float
        The amplitude of the ringdown (constant for now).
    phi : float
        The initial phase of the ringdown. Should also include the
        information from the azimuthal angle (phi_0 + m*Phi).
    inclination : {None, float}, optional
        Inclination of the system in radians for the spherical harmonics.
    l : {2, int}, optional
        l mode for the spherical harmonics. Default is l=2.
    m : {2, int}, optional
        m mode for the spherical harmonics. Default is m=2.
    t_0 : {0, float}, optional
        The starting time of the ringdown.
    delta_f : {None, float}, optional
        The frequency step used to generate the ringdown. If None, it
        will be set to the inverse of the time at which the amplitude is
        1/1000 of the peak amplitude.
    f_lower : {None, float}, optional
        The starting frequency of the output frequency series. If None,
        it will be set to delta_f.
    f_final : {None, float}, optional
        The ending frequency of the output frequency series. If None, it
        will be set to the frequency at which the amplitude is 1/1000 of
        the peak amplitude.

    Returns
    hplustilde : FrequencySeries
        The plus phase of the ringdown in frequency domain.
    hcrosstilde : FrequencySeries
        The cross phase of the ringdown in frequency domain.
    """
    input_params = props(template, qnm_required_args, **kwargs)
    f_0 = input_params.pop('f_0')
    tau = input_params.pop('tau')
    amp = input_params.pop('amp')
    phi = input_params.pop('phi')
    # the following have defaults, and so will be populated
    t_0 = input_params.pop('t_0')
    # the following may not be in input_params
    inc = input_params.pop('inclination', None)
    l = input_params.pop('l', 2)
    m = input_params.pop('m', 2)
    delta_f = input_params.pop('delta_f', None)
    f_lower = input_params.pop('f_lower', None)
    f_final = input_params.pop('f_final', None)
    # Default resolution: inverse of the time at which the ringdown
    # amplitude has decayed to 1/1000 of its peak.
    if not delta_f:
        delta_f = 1. / qnm_time_decay(tau, 1. / 1000)
    if not f_lower:
        f_lower = delta_f
        kmin = 0
    else:
        kmin = int(f_lower / delta_f)
    # Default upper frequency: where the amplitude falls to 1/1000 of the
    # peak, capped at max_freq.
    if not f_final:
        f_final = qnm_freq_decay(f_0, tau, 1. / 1000)
    if f_final > max_freq:
        f_final = max_freq
    kmax = int(f_final / delta_f) + 1
    freqs = numpy.arange(kmin, kmax) * delta_f
    # Spherical-harmonic factors; unity when no inclination is supplied.
    if inc is not None:
        Y_plus, Y_cross = spher_harms(l, m, inc)
    else:
        Y_plus, Y_cross = 1, 1
    denominator = 1 + (4j * pi * freqs * tau) - (4 * pi_sq * (freqs * freqs - f_0 * f_0) * tau * tau)
    norm = amp * tau / denominator
    # A nonzero start time appears as a phase ramp in the frequency domain.
    if t_0 != 0:
        time_shift = numpy.exp(-1j * two_pi * freqs * t_0)
        norm *= time_shift
    # Analytical expression for the Fourier transform of the ringdown (damped sinusoid)
    hp_tilde = norm * Y_plus * ((1 + 2j * pi * freqs * tau) * numpy.cos(phi) - two_pi * f_0 * tau * numpy.sin(phi))
    hc_tilde = norm * Y_cross * ((1 + 2j * pi * freqs * tau) * numpy.sin(phi) + two_pi * f_0 * tau * numpy.cos(phi))
    outplus = FrequencySeries(zeros(kmax, dtype=complex128), delta_f=delta_f)
    outcross = FrequencySeries(zeros(kmax, dtype=complex128), delta_f=delta_f)
    # Frequencies below kmin are left as zeros.
    outplus.data[kmin:kmax] = hp_tilde
    outcross.data[kmin:kmax] = hc_tilde
    return outplus, outcross
|
def _get ( self , key , parser_result ) :
"""Given a type and a dict of parser results , return
the items as a list ."""
|
try :
list_data = parser_result [ key ] . asList ( )
if any ( isinstance ( obj , str ) for obj in list_data ) :
txt_lines = [ '' . join ( list_data ) ]
else :
txt_lines = [ '' . join ( f ) for f in list_data ]
except KeyError :
txt_lines = [ ]
return txt_lines
|
def getSDSSImage(ra, dec, radius=1.0, xsize=800, opt='GML', **kwargs):
    """Download a Sloan Digital Sky Survey cutout image.

    http://skyserver.sdss3.org/dr9/en/tools/chart/chart.asp
    radius (degrees)
    opts: (G) Grid, (L) Label, P (PhotoObj), S (SpecObj), O (Outline),
    (B) Bounding Box, (F) Fields, (M) Mask, (Q) Plates, (I) Invert
    """
    import subprocess
    import tempfile
    url = "http://skyservice.pha.jhu.edu/DR10/ImgCutout/getjpeg.aspx?"
    # Arcseconds per pixel such that the requested radius spans the image.
    scale = 2. * radius * 3600. / xsize
    pairs = [('ra', ra), ('dec', dec), ('width', xsize),
             ('height', xsize), ('scale', scale), ('opt', opt)]
    query = '&'.join("%s=%s" % pair for pair in pairs)
    tmp = tempfile.NamedTemporaryFile(suffix='.jpeg')
    cmd = 'wget --progress=dot:mega -O %s "%s"' % (tmp.name, url + query)
    subprocess.call(cmd, shell=True)
    im = plt.imread(tmp.name)
    tmp.close()
    return im
|
def _solve_forbase_address ( self , function_starts , functions ) :
"""Voting for the most possible base address .
: param function _ starts :
: param functions :
: returns :"""
|
pseudo_base_addr = self . project . loader . main_object . min_addr
base_addr_ctr = { }
for s in function_starts :
for f in functions :
base_addr = s - f + pseudo_base_addr
ctr = 1
for k in function_starts :
if k - base_addr + pseudo_base_addr in functions :
ctr += 1
if ctr > 5 :
base_addr_ctr [ base_addr ] = ctr
if len ( base_addr_ctr ) :
base_addr , hits = sorted ( [ ( k , v ) for k , v in base_addr_ctr . items ( ) ] , key = lambda x : x [ 1 ] , reverse = True ) [ 0 ]
return base_addr
else :
return None
|
def dump_cookie(key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, charset='utf-8', sync_expires=True):
    """Creates a new Set-Cookie header without the ``Set-Cookie`` prefix.

    The parameters are the same as in the cookie Morsel object in the
    Python standard library but it accepts unicode data, too.

    On Python 3 the return value of this function will be a unicode
    string, on Python 2 it will be a native string. In both cases the
    return value is usually restricted to ascii as the vast majority of
    values are properly escaped, but that is no guarantee. If a unicode
    string is returned it's tunneled through latin1 as required by
    PEP 3333.

    The return value is not ASCII safe if the key contains unicode
    characters. This is technically against the specification but
    happens in the wild. It's strongly recommended to not use
    non-ASCII values for the keys.

    :param max_age: should be a number of seconds, or `None` (default) if
                    the cookie should last only as long as the client's
                    browser session. Additionally `timedelta` objects
                    are accepted, too.
    :param expires: should be a `datetime` object or unix timestamp.
    :param path: limits the cookie to a given path, per default it will
                 span the whole domain.
    :param domain: Use this if you want to set a cross-domain cookie. For
                   example, ``domain=".example.com"`` will set a cookie
                   that is readable by the domain ``www.example.com``,
                   ``foo.example.com`` etc. Otherwise, a cookie will only
                   be readable by the domain that set it.
    :param secure: The cookie will only be available via HTTPS
    :param httponly: disallow JavaScript to access the cookie. This is an
                     extension to the cookie standard and probably not
                     supported by all browsers.
    :param charset: the encoding for unicode values.
    :param sync_expires: automatically set expires if max_age is defined
                         but expires not.
    """
    key = to_bytes(key, charset)
    value = to_bytes(value, charset)
    if path is not None:
        path = iri_to_uri(path, charset)
    domain = _make_cookie_domain(domain)
    if isinstance(max_age, timedelta):
        # Normalize a timedelta to whole seconds (microseconds dropped).
        max_age = (max_age.days * 60 * 60 * 24) + max_age.seconds
    if expires is not None:
        if not isinstance(expires, string_types):
            expires = cookie_date(expires)
    elif max_age is not None and sync_expires:
        # Derive an Expires attribute from max_age for old clients that
        # ignore Max-Age.
        expires = to_bytes(cookie_date(time() + max_age))
    buf = [key + b'=' + _cookie_quote(value)]
    # XXX: In theory all of these parameters that are not marked with `None`
    # should be quoted. Because stdlib did not quote it before I did not
    # want to introduce quoting there now.
    for k, v, q in ((b'Domain', domain, True), (b'Expires', expires, False,), (b'Max-Age', max_age, False), (b'Secure', secure, None), (b'HttpOnly', httponly, None), (b'Path', path, False)):
        # q semantics: None -> boolean flag attribute (emitted bare when
        # truthy); True -> value must be cookie-quoted; False -> raw value.
        if q is None:
            if v:
                buf.append(k)
            continue
        if v is None:
            continue
        tmp = bytearray(k)
        if not isinstance(v, (bytes, bytearray)):
            v = to_bytes(text_type(v), charset)
        if q:
            v = _cookie_quote(v)
        tmp += b'=' + v
        buf.append(bytes(tmp))
    # The return value will be an incorrectly encoded latin1 header on
    # Python 3 for consistency with the headers object and a bytestring
    # on Python 2 because that's how the API makes more sense.
    rv = b'; '.join(buf)
    if not PY2:
        rv = rv.decode('latin1')
    return rv
|
def strptime(cls, date_string, format):
    'string, format -> new datetime parsed from a string (like time.strptime()).'
    # Defer to the pure-Python _strptime implementation that backs
    # time.strptime as well.
    from _strptime import _strptime_datetime
    return _strptime_datetime(cls, date_string, format)
|
def validate_split_runs_file(split_runs_file):
    """Validate a --split_runs tsv file and map run_IDs to names.

    :param split_runs_file: open file handle whose first line is the
        mandatory 'NAME<TAB>RUN_ID' header, followed by one
        tab-separated name/run_id pair per line.
    :return: dict mapping run_ID -> name.

    Exits the program (after logging the error) when the header is
    missing or the file is not in the expected format.
    """
    try:
        content = [l.strip() for l in split_runs_file.readlines()]
        if content[0].upper().split('\t') == ['NAME', 'RUN_ID']:
            return {c.split('\t')[1]: c.split('\t')[0] for c in content[1:] if c}
        # BUG FIX: log before exiting -- the original called logging.error
        # after sys.exit(), which is unreachable.
        logging.error("Mandatory header of --split_runs tsv file not found: 'NAME', 'RUN_ID'")
        sys.exit("ERROR: Mandatory header of --split_runs tsv file not found: 'NAME', 'RUN_ID'")
    except IndexError:
        logging.error("ERROR: Format of --split_runs tab separated file not as expected")
        sys.exit("ERROR: Format of --split_runs tab separated file not as expected")
|
def open(self, options=None, mimetype='application/octet-stream'):
    """Return a file-like object for self.

    The result is usable with the 'with' statement; the call simply
    delegates to the underlying connection.
    """
    conn = self.connection
    return conn.open(self, options, mimetype)
|
def crop(im, r, c, sz_h, sz_w):
    '''Return the sz_h-by-sz_w sub-image whose top-left corner is (r, c).'''
    rows = slice(r, r + sz_h)
    cols = slice(c, c + sz_w)
    return im[rows, cols]
|
def get_declared_enums(metadata, schema, default):
    """Return a dict mapping declared SQLAlchemy enum names to their values.

    :param metadata: SQLAlchemy MetaData holding the declared tables.
    :param str schema:
        Schema name (e.g. "public").
    :param default: schema assumed for enum types that declare none.
    :returns dict:
        e.g. {"my_enum": frozenset(["a", "b", "c"])}
    """
    declared = {}
    for table in metadata.tables.values():
        for column in table.columns:
            kind = column.type
            if isinstance(kind, sqlalchemy.Enum) and schema == (kind.schema or default):
                declared[kind.name] = frozenset(kind.enums)
    return declared
|
def get_clear_pin(pinblock, account_number):
    """Recover the clear PIN from a PIN block and account_number.

    ``account_number`` is the 12 right-most digits of the card account
    number, excluding the check digit.
    """
    block_bytes = bytes.fromhex(pinblock.decode('utf-8'))
    account_bytes = bytes.fromhex((b'0000' + account_number).decode('utf-8'))
    # XOR away the account-number mask, leaving length + digits + padding.
    decoded = xor(raw2B(block_bytes), raw2B(account_bytes)).decode('utf-8')
    pin_length = int(decoded[:2], 16)
    if not 4 <= pin_length < 9:
        raise ValueError('Incorrect PIN length: {}'.format(pin_length))
    pin = decoded[2:2 + pin_length]
    try:
        int(pin)
    except ValueError:
        raise ValueError('PIN contains non-numeric characters')
    return bytes(pin, 'utf-8')
|
def parse_queue(self, global_params, region, queue_url):
    """Parse a single queue and fetch additional attributes.

    :param global_params: Parameters shared for all regions
    :param region: Name of the AWS region
    :param queue_url: URL of the AWS queue
    """
    attributes = api_clients[region].get_queue_attributes(
        QueueUrl=queue_url,
        AttributeNames=['CreatedTimestamp', 'Policy', 'QueueArn'])['Attributes']
    queue = {'QueueUrl': queue_url, 'arn': attributes.pop('QueueArn')}
    for field in ['CreatedTimestamp']:
        queue[field] = attributes.get(field)
    if 'Policy' in attributes:
        queue['Policy'] = json.loads(attributes['Policy'])
    else:
        # No policy attached: normalize to an empty statement list.
        queue['Policy'] = {'Statement': []}
    # The queue name is the last ARN component.
    queue['name'] = queue['arn'].split(':')[-1]
    self.queues[queue['name']] = queue
|
def xmoe_tr_dense_2k():
    """Series of architectural experiments on Translation.

    # run on 8-core setup
    119M params, einsum=0.95e13

    Returns:
      a hparams
    """
    hparams = mtf_transformer2.mtf_bitransformer_base()
    # Four repetitions of the layer stacks on each side ("drd" is
    # presumably the dense-relu-dense feed-forward layer -- confirm
    # against mtf_transformer2).
    hparams.encoder_layers = ["self_att", "drd"] * 4
    hparams.decoder_layers = ["self_att", "enc_att", "drd"] * 4
    hparams.batch_size = 64
    hparams.shared_embedding_and_softmax_weights = True
    # Split the batch dimension across the 8 cores.
    hparams.mesh_shape = "batch:8"
    return hparams
|
def _extract_one_pair ( body ) :
"""Extract one language - text pair from a : class : ` ~ . LanguageMap ` .
This is used for tracking ."""
|
if not body :
return None , None
try :
return None , body [ None ]
except KeyError :
return min ( body . items ( ) , key = lambda x : x [ 0 ] )
|
def marching_cubes(self):
    """A marching cubes Trimesh representation of the voxels.

    No effort was made to clean or smooth the result in any way; it is
    merely the output of applying scikit-image's measure.marching_cubes
    to self.matrix.

    Returns
    meshed : Trimesh object representing the current voxel object, as
        returned by the marching cubes algorithm.
    """
    return matrix_to_marching_cubes(matrix=self.matrix,
                                    pitch=self.pitch,
                                    origin=self.origin)
|
def Register(self, app_id, challenge, registered_keys):
    """Registers app_id with the security key.

    Executes the U2F registration flow with the security key.

    Args:
      app_id: The app_id to register the security key against.
      challenge: Server challenge passed to the security key.
      registered_keys: List of keys already registered for this app_id+user.

    Returns:
      RegisterResponse with key_handle and attestation information in it (
        encoded in FIDO U2F binary format within registration_data field).

    Raises:
      U2FError: There was some kind of problem with registration (e.g.
        the device was already registered or there was a timeout waiting
        for the test of user presence).
    """
    client_data = model.ClientData(model.ClientData.TYP_REGISTRATION, challenge, self.origin)
    challenge_param = self.InternalSHA256(client_data.GetJson())
    app_param = self.InternalSHA256(app_id)
    # First probe each already-registered key in check-only mode to detect
    # whether this token already holds one of the key handles.
    for key in registered_keys:
        try:  # skip non U2F_V2 keys
            if key.version != u'U2F_V2':
                continue
            # Final True argument = check-only mode: the token reports whether
            # it owns the key handle without completing authentication.
            resp = self.security_key.CmdAuthenticate(challenge_param, app_param, key.key_handle, True)
            # check_only mode CmdAuthenticate should always raise some
            # exception
            raise errors.HardwareError('Should Never Happen')
        except errors.TUPRequiredError:  # This indicates key was valid. Thus, no need to register
            raise errors.U2FError(errors.U2FError.DEVICE_INELIGIBLE)
        except errors.InvalidKeyHandleError as e:  # This is the case of a key for a different token, so we just ignore it.
            pass
        except errors.HardwareError as e:
            raise errors.U2FError(errors.U2FError.BAD_REQUEST, e)
    # Now register the new key
    # Poll for up to ~15s (30 attempts x 0.5s sleep), winking the token to
    # prompt the user whenever the test-of-user-presence is still required.
    for _ in range(30):
        try:
            resp = self.security_key.CmdRegister(challenge_param, app_param)
            return model.RegisterResponse(resp, client_data)
        except errors.TUPRequiredError as e:
            self.security_key.CmdWink()
            time.sleep(0.5)
        except errors.HardwareError as e:
            raise errors.U2FError(errors.U2FError.BAD_REQUEST, e)
    raise errors.U2FError(errors.U2FError.TIMEOUT)
|
def _save_stats(self, epoch_data: EpochData) -> None:
    """Extend ``epoch_data`` by stream:variable:aggregation data.

    :param epoch_data: data source from which the statistics are computed
    """
    # Variables are already checked in the AccumulatingHook; hence, we do
    # not check them here.
    for stream_name in epoch_data.keys():
        for variable, aggregations in self._variable_aggregations.items():
            accumulated = self._accumulator[stream_name][variable]
            epoch_data[stream_name][variable] = OrderedDict(
                (aggr, ComputeStats._compute_aggregation(aggr, accumulated))
                for aggr in aggregations)
|
def apply(self, root):
    """Apply the import (rule) to the specified schema.

    If the schema does not already contain an import for the
    I{namespace} specified here, it is added.
    @param root: A schema root.
    @type root: L{Element}
    """
    # Skip schemas the filter rejects, and avoid duplicating an import.
    if not self.filter.match(root, self.ns) or self.exists(root):
        return
    node = Element('import', ns=self.xsdns)
    node.set('namespace', self.ns)
    if self.location is not None:
        node.set('schemaLocation', self.location)
    log.debug('inserting: %s', node)
    root.insert(node)
|
def _get_shard(self, shard):
    """Dynamically build a method that queries a single shard.

    :param shard: name of the shard the generated method will request
    :returns: a function (with arg/kwargs support) that fetches the shard
        via ``self.get_shards``
    """
    @wraps(API_WRAPPER._get_shard)
    def get_shard(*arg, **kwargs):
        return self.get_shards(Shard(shard, *arg, **kwargs))
    # The original "docstring" here was `"""...""".format(shard)` -- a
    # discarded expression, not a docstring. Assign __doc__ explicitly
    # (after @wraps, which would otherwise overwrite it).
    get_shard.__doc__ = "Gets the shard '{}'".format(shard)
    return get_shard
|
def set_base_prompt(self, pri_prompt_terminator="$", alt_prompt_terminator="#", delay_factor=1):
    """Determine base prompt.

    Delegates to the parent class, forwarding all arguments unchanged.
    """
    return super(LinuxSSH, self).set_base_prompt(
        pri_prompt_terminator=pri_prompt_terminator,
        alt_prompt_terminator=alt_prompt_terminator,
        delay_factor=delay_factor,
    )
|
def create_identity_pool(IdentityPoolName, AllowUnauthenticatedIdentities=False,
                         SupportedLoginProviders=None, DeveloperProviderName=None,
                         OpenIdConnectProviderARNs=None, region=None, key=None,
                         keyid=None, profile=None):
    '''Creates a new identity pool. All parameters except for IdentityPoolName is optional.
    SupportedLoginProviders should be a dictionary mapping provider names to provider app
    IDs. OpenIdConnectProviderARNs should be a list of OpenID Connect provider ARNs.

    Returns the created identity pool if successful

    CLI Example:

    .. code-block:: bash

        salt myminion boto_cognitoidentity.create_identity_pool my_id_pool_name DeveloperProviderName=custom_developer_provider
    '''
    if SupportedLoginProviders is None:
        SupportedLoginProviders = dict()
    if OpenIdConnectProviderARNs is None:
        OpenIdConnectProviderARNs = list()
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    request_params = dict(
        IdentityPoolName=IdentityPoolName,
        AllowUnauthenticatedIdentities=AllowUnauthenticatedIdentities,
        SupportedLoginProviders=SupportedLoginProviders,
        OpenIdConnectProviderARNs=OpenIdConnectProviderARNs,
    )
    # DeveloperProviderName is only included when truthy.
    if DeveloperProviderName:
        request_params['DeveloperProviderName'] = DeveloperProviderName
    try:
        response = conn.create_identity_pool(**request_params)
    except ClientError as e:
        return {'created': False, 'error': __utils__['boto3.get_error'](e)}
    response.pop('ResponseMetadata', None)
    return {'created': True, 'identity_pool': response}
|
def get_requirements(lookup=None):
    '''Read requirements and versions from the lookup obtained with get_lookup.

    :param lookup: mapping with an ``INSTALL_REQUIRES`` entry: a list of
        ``(module_name, module_meta)`` pairs where ``module_meta`` may carry
        ``exact_version`` or ``min_version``. Defaults to ``get_lookup()``.
    :returns: list of pip-style requirement strings (e.g. ``"foo>=1.0"``).
    '''
    if lookup is None:
        lookup = get_lookup()
    install_requires = []
    for module_name, module_meta in lookup['INSTALL_REQUIRES']:
        if "exact_version" in module_meta:
            dependency = "%s==%s" % (module_name, module_meta['exact_version'])
        elif module_meta.get('min_version') is not None:
            dependency = "%s>=%s" % (module_name, module_meta['min_version'])
        else:
            # No (or null) version constraint: use the bare module name.
            # The original reused the previous iteration's `dependency`
            # here (or raised NameError on the first module).
            dependency = module_name
        install_requires.append(dependency)
    return install_requires
|
def get_sections(self):
    """Returns an array with the sections to be displayed.

    Every section is a dictionary with the following structure:
        {'id': <section_identifier>,
         'title': <section_title>,
         'panels': <array of panels>}
    """
    user = api.user.get_current()
    # (panel id, section builder) in display order.
    candidates = (
        ('analyses', self.get_analyses_section),
        ('analysisrequests', self.get_analysisrequests_section),
        ('worksheets', self.get_worksheets_section),
    )
    return [build() for panel_id, build in candidates
            if is_panel_visible_for_user(panel_id, user)]
|
def modified(self):
    """Union[datetime.datetime, None]: Datetime at which the model was last
    modified (:data:`None` until set from the server).

    Read-only.
    """
    millis = self._proto.last_modified_time
    if not millis:
        return None
    # Proto value is in milliseconds; the helper expects microseconds.
    return google.cloud._helpers._datetime_from_microseconds(1000.0 * float(millis))
|
def water(target, temperature='pore.temperature', salinity='pore.salinity'):
    r"""Calculate the vapor pressure of pure water or seawater.

    Seawater behaviour follows [1] based on Raoult's law; the pure-water
    vapor pressure correlation is from [2].

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated. This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    temperature : string
        The dictionary key containing the phase temperature values
    salinity : string
        The dictionary key containing the phase salinity values

    Returns
    -------
    The vapor pressure of water/seawater in [Pa]

    Notes
    -----
    T must be in K, and S in g of salt per kg of phase, or ppt (parts per
    thousand).
    VALIDITY: 273 < T < 473 K; 0 < S < 240 g/kg;
    ACCURACY: 0.5%

    References
    ----------
    [1] Sharqawy M.H., Lienhard J.H., and Zubair, S.M., Desalination and
        Water Treatment, 2010.
    [2] ASHRAE handbook: Fundamentals, ASHRAE; 2005.
    """
    T = target[temperature]
    # Salinity defaults to 0 (pure water) when the phase has no salinity data.
    S = target[salinity] if salinity in target.keys() else 0
    # Pure-water saturation pressure correlation coefficients [2].
    a1, a2, a3, a4, a5, a6 = (-5.8002206E+03, 1.3914993E+00, -4.8640239E-02,
                              4.1764768E-05, -1.4452093E-08, 6.5459673E+00)
    Pv_w = np.exp(a1 / T + a2 + a3 * T + a4 * T ** 2 + a5 * T ** 3 + a6 * np.log(T))
    # Raoult's-law salinity correction [1].
    return Pv_w / (1 + 0.57357 * (S / (1000 - S)))
|
def draw_units(self, surf):
    """Draw the units and buildings.

    For each visible unit inside the camera frustum: a filled circle in the
    owner's color, a darker overlay scaled by damage, shield/energy arcs,
    the unit-type name, and a selection ring.
    """
    for u, p in self._visible_units():
        # Only draw units whose bounding circle intersects the camera view.
        if self._camera.intersects_circle(p, u.radius):
            # Fraction of health lost, clamped to [0, 1]; `or 1` guards
            # against division by zero when health_max is 0.
            fraction_damage = clamp((u.health_max - u.health) / (u.health_max or 1), 0, 1)
            surf.draw_circle(colors.PLAYER_ABSOLUTE_PALETTE[u.owner], p, u.radius)
            if fraction_damage > 0:
                # Half-intensity inner circle whose radius grows with damage.
                surf.draw_circle(colors.PLAYER_ABSOLUTE_PALETTE[u.owner] // 2, p, u.radius * fraction_damage)
            if u.shield and u.shield_max:
                # Blue arc proportional to remaining shield.
                surf.draw_arc(colors.blue, p, u.radius, 0, 2 * math.pi * u.shield / u.shield_max)
            if u.energy and u.energy_max:
                # Purple arc (slightly inset) proportional to remaining energy.
                surf.draw_arc(colors.purple * 0.75, p, u.radius - 0.05, 0, 2 * math.pi * u.energy / u.energy_max)
            name = self.get_unit_name(surf, self._static_data.units.get(u.unit_type, "<none>"), u.radius)
            if name:
                text = self._font_small.render(name, True, colors.white)
                rect = text.get_rect()
                rect.center = surf.world_to_surf.fwd_pt(p)
                surf.surf.blit(text, rect)
            if u.is_selected:
                # Thin green selection ring just outside the unit.
                surf.draw_circle(colors.green, p, u.radius + 0.1, 1)
|
def _ite(test: str, in1: str, in0: str, output: str = None):
    r"test -> in1 /\ ~test -> in0"
    # The three wire names must be pairwise distinct.
    assert len({test, in0, in1}) == 3
    # (~test | in1): true whenever test selects in1.
    when_true = bit_flipper([test]) >> or_gate([test, in1], 'true_out')
    # (test | in0): true whenever ~test selects in0.
    when_false = or_gate([test, in0], 'false_out')
    # AND the two halves to complete the multiplexer.
    return (when_true | when_false) >> and_gate(['true_out', 'false_out'], output)
|
def raise_(tp, value=None, tb=None):
    """A function that matches the Python 2.x ``raise`` statement. This
    allows re-raising exceptions with the cls value and traceback on
    Python 2 and 3.
    """
    # Mirrors the Py2 restriction: `raise inst, value` is illegal.
    if isinstance(tp, Exception) and value is not None:
        raise TypeError("instance exception may not have a separate value")
    exc = tp if value is None else tp(value)
    if exc.__traceback__ is not tb:
        raise exc.with_traceback(tb)
    raise exc
|
def get_group(self, group_id):
    """Get group for the provided group ID.

    Args:
      group_id: Group ID for which group is to be determined.

    Returns:
      Group corresponding to the provided group ID, or None (with the error
      logged and dispatched to the error handler) when the ID is unknown.
    """
    group = self.group_id_map.get(group_id)
    if not group:
        self.logger.error('Group ID "%s" is not in datafile.' % group_id)
        self.error_handler.handle_error(
            exceptions.InvalidGroupException(enums.Errors.INVALID_GROUP_ID_ERROR))
        return None
    return group
|
def request(url, *args, **kwargs):
    """Do the HTTP Request and return the decoded JSON data.

    :param url: target URL
    :param kwargs: forwarded to ``requests.request``; ``method`` (default
        ``'GET'``) and ``timeout`` (default 10, the hass default) are consumed.
    :returns: parsed JSON body of the response
    """
    # Pop (not get) 'method' so it is not forwarded twice to
    # requests.request, which would raise a duplicate-argument TypeError.
    method = kwargs.pop('method', 'GET')
    timeout = kwargs.pop('timeout', 10)
    # hass default timeout
    req = requests.request(method, url, *args, timeout=timeout, **kwargs)
    data = req.json()
    _LOGGER.debug(json.dumps(data))
    return data
|
def _split_index ( params ) :
"""Delete index infromation from params"""
|
if isinstance ( params , list ) :
return [ params [ 0 ] , _split_index ( params [ 1 ] ) ]
elif isinstance ( params , dict ) :
if INDEX in params . keys ( ) :
return _split_index ( params [ VALUE ] )
result = dict ( )
for key in params :
result [ key ] = _split_index ( params [ key ] )
return result
else :
return params
|
async def get_json(self, url, timeout=30, astext=False, exceptions=False):
    """Get URL and parse JSON from text.

    :param url: URL to fetch
    :param timeout: overall timeout in seconds for the request
    :param astext: if True, return the raw response text instead of JSON
    :param exceptions: if True, re-raise client/timeout errors instead of
        returning None
    :returns: parsed JSON (or raw text when ``astext``), or None on failure
    """
    try:
        with async_timeout.timeout(timeout):
            res = await self._aio_session.get(url)
            # Any non-200 status is logged and treated as "no data".
            if res.status != 200:
                _LOGGER.error("QSUSB returned %s [%s]", res.status, url)
                return None
            res_text = await res.text()
    except (aiohttp.client_exceptions.ClientError, asyncio.TimeoutError) as exc:
        if exceptions:
            raise exc
        return None
    if astext:
        return res_text
    try:
        return json.loads(res_text)
    except json.decoder.JSONDecodeError:
        # An all-spaces body is treated as empty rather than a decode error.
        if res_text.strip(" ") == "":
            return None
        _LOGGER.error("Could not decode %s [%s]", res_text, url)
|
def select_upstream(self, device: devicetools.Device) -> 'Selection':
    """Restrict the current selection to the network upstream of the given
    starting point, including the starting point itself.

    See the documentation on method |Selection.search_upstream| for
    additional information.
    """
    found = self.search_upstream(device)
    self.nodes, self.elements = found.nodes, found.elements
    return self
|
def assemble_common_meta(common_meta_dfs, fields_to_remove, sources, remove_all_metadata_fields, error_report_file):
    """Assemble the common metadata dfs together. Both indices are sorted.

    Fields that are not in all the dfs are dropped.

    Args:
        common_meta_dfs (list of pandas dfs)
        fields_to_remove (list of strings): fields to be removed from the
            common metadata because they don't agree across files
        sources: source identifiers passed through to the mismatch report
        remove_all_metadata_fields: forwarded to build_common_all_meta_df
        error_report_file (str or None): if set, path where the mismatch
            report is written as TSV before raising

    Returns:
        all_meta_df_sorted (pandas df)

    Raises:
        MismatchCommonMetadataConcatException: when the combined metadata
            index contains duplicate ids (files disagree).
    """
    all_meta_df, all_meta_df_with_dups = build_common_all_meta_df(common_meta_dfs, fields_to_remove, remove_all_metadata_fields)
    if not all_meta_df.index.is_unique:
        # Build a per-id diagnostic report of the conflicting rows.
        all_report_df = build_mismatched_common_meta_report([x.shape for x in common_meta_dfs], sources, all_meta_df, all_meta_df_with_dups)
        unique_duplicate_ids = all_report_df.index.unique()
        if error_report_file is not None:
            all_report_df.to_csv(error_report_file, sep="\t")
        msg = """There are inconsistencies in common_metadata_df between different files. Try excluding metadata fields
using the fields_to_remove argument. unique_duplicate_ids: {}
all_report_df:
{}""".format(unique_duplicate_ids, all_report_df)
        raise MismatchCommonMetadataConcatException(msg)
    # Finally, sort the index
    all_meta_df_sorted = all_meta_df.sort_index(axis=0)
    return all_meta_df_sorted
|
def run_cmake(command, build_path, default_build_path):
    """Execute CMake command, report its output, and handle the result.

    On success, records the setup command and prints build help; on failure,
    removes the build directory iff it is the default one.
    """
    from subprocess import Popen, PIPE
    from shutil import rmtree
    topdir = os.getcwd()
    process = Popen(command, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    raw_out, raw_err = process.communicate()
    stdout = raw_out.decode('UTF-8')
    stderr = raw_err.decode('UTF-8')
    # print cmake output to screen
    print(stdout)
    # CMake warnings go to stderr and might be benign, so report but
    # do not stop.
    if stderr:
        sys.stderr.write(stderr)
    # write cmake output to file
    with open(os.path.join(build_path, 'cmake_output'), 'w') as f:
        f.write(stdout)
    # change directory and return
    os.chdir(topdir)
    # Configuration succeeded iff all three marker sentences appear in stdout.
    markers = ('-- Configuring done',
               '-- Generating done',
               '-- Build files have been written to')
    if all(marker in stdout for marker in markers):
        save_setup_command(sys.argv, build_path)
        print_build_help(build_path, default_build_path)
    elif build_path == default_build_path:
        # remove build_path iff not set by the user,
        # otherwise removal can be dangerous
        rmtree(default_build_path)
|
def merge(*maps):
    """Merge all maps left to right into a new dict.

    Later maps override earlier ones; inputs are deep-copied so the
    originals are never mutated.
    """
    def _merge_into(acc, nxt):
        acc.update(nxt)
        return acc
    return reduce(_merge_into, map(deepcopy, maps))
|
def column_describe(table_name, col_name):
    """Return summary statistics of a column as JSON.

    Uses Pandas' "split" JSON format.
    """
    stats = orca.get_table(table_name).get_column(col_name).describe()
    headers = {'Content-Type': 'application/json'}
    return stats.to_json(orient='split'), 200, headers
|
def loss(params, batch, model_predict, rng):
    """Calculate loss (negative masked-mean cross entropy)."""
    inputs, targets = batch
    predictions = model_predict(inputs, params, rng=rng)
    predictions, targets = _make_list(predictions, targets)
    # Per-output cross entropy of predictions against one-hot targets.
    xent = [np.sum(pred * layers.one_hot(target, pred.shape[-1]), axis=-1)
            for pred, target in zip(predictions, targets)]
    return -masked_mean(xent, targets)
|
def to_segwizard(segs, target, header=True, coltype=LIGOTimeGPS):
    """Write the given `SegmentList` to a file in SegWizard format.

    Parameters
    ----------
    segs : :class:`~gwpy.segments.SegmentList`
        The list of segments to write.
    target : `file`, `str`
        An open file, or file path, to which to write.
    header : `bool`, optional
        Print a column header into the file, default: `True`.
    coltype : `type`, optional
        The numerical type in which to cast times before printing.

    Notes
    -----
    This method is adapted from original code written by Kipp Cannon and
    distributed under GPLv3.
    """
    # Given a path: open it and recurse with the file object.
    if isinstance(target, string_types):
        with open(target, 'w') as fobj:
            return to_segwizard(segs, fobj, header=header, coltype=coltype)
    # Given a file object: write rows directly.
    if header:
        print('# seg\tstart\tstop\tduration', file=target)
    for row, segment in enumerate(segs):
        start = coltype(segment[0])
        stop = coltype(segment[1])
        print('\t'.join(map(str, (row, start, stop, float(stop - start)))),
              file=target)
|
def post(self, url, data, proto='http', form_name=None):
    """Load an url using the POST method.

    Keyword arguments:
    url -- the Universal Resource Location
    data -- the form to be sent
    proto -- the protocol (default 'http')
    form_name -- the form name to search the default values
    """
    # When no explicit form name is given, the url doubles as the name.
    form = self.translator.fill_form(self.last_response_soup,
                                     form_name if form_name else url,
                                     data)
    full_url = proto + self.base_uri + url
    self.last_response = self.session.post(full_url,
                                           headers=self.headers,
                                           cookies=self.cookies,
                                           data=form,
                                           allow_redirects=True,
                                           verify=self.verify)
    return self.last_response_soup
|
def loadXml(self, xdata, filepath=''):
    """Loads properties from the xml data.

    :param xdata: <xml.etree.ElementTree.Element> root of the build config
    :param filepath: base path used to resolve relative paths in the XML

    Walks the known child sections of ``xdata`` (environment, settings,
    options, paths, executable, installer) and copies their values onto
    the corresponding private attributes of this builder.
    """
    # build options
    opts = {'platform': sys.platform}
    # Resolve a path relative to this config file's location.
    mkpath = lambda x: _mkpath(filepath, x, **opts)
    # lookup environment variables
    xenv = xdata.find('environment')
    if xenv is not None:
        env = {}
        log.info('loading environment...')
        for xkey in xenv:
            text = xkey.text
            if text:
                # Environment values may reference other variables ($VAR).
                env[xkey.tag] = os.path.expandvars(text)
            else:
                env[xkey.tag] = ''
        self.setEnvironment(env)
    # lookup general settings
    xsettings = xdata.find('settings')
    if xsettings is not None:
        for xsetting in xsettings:
            key = xsetting.tag
            val = xsetting.text
            attr = '_' + key
            # Only settings with a matching private attribute are applied.
            if hasattr(self, attr):
                setattr(self, attr, val)
    # lookup options
    xoptions = xdata.find('options')
    if xoptions is not None:
        options = 0
        for xopt in xoptions:
            key = xopt.tag
            value = xopt.text
            if value.lower() == 'true':
                try:
                    # Builder.Options maps option names to bit flags;
                    # unknown names are silently skipped.
                    options |= Builder.Options[key]
                except KeyError:
                    continue
        self._options = options
    # lookup path options
    xpaths = xdata.find('paths')
    if xpaths is not None:
        for xpath in xpaths:
            key = xpath.tag
            path = xpath.text
            if key.endswith('Paths'):
                # '*Paths' entries hold a ';'-separated list of paths.
                path = map(mkpath, path.split(';'))
            else:
                path = mkpath(path)
            setattr(self, '_' + key, path)
    # lookup executable options
    xexe = xdata.find('executable')
    if xexe is not None:
        exe_tags = {'runtime': '_runtime', 'exe': '_executableName', 'cli': '_executableCliName', 'product': '_productName'}
        for tag, prop in exe_tags.items():
            xtag = xexe.find(tag)
            if xtag is not None:
                value = xtag.text
                # Values starting with '.' are relative paths to resolve.
                if value.startswith('.'):
                    value = mkpath(value)
                setattr(self, prop, value)
        # load exclude options
        xexcludes = xexe.find('excludes')
        if xexcludes is not None:
            excludes = []
            for xexclude in xexcludes:
                excludes.append(xexclude.text)
            self.setExecutableExcludes(excludes)
        # load build data
        xexedata = xexe.find('data')
        if xexedata is not None:
            data = []
            for xentry in xexedata:
                if xentry.tag == 'tree':
                    path = xentry.get('path', '')
                    if path:
                        path = mkpath(path)
                    else:
                        # No path attribute: default to the source path.
                        path = self.sourcePath()
                    prefix = xentry.get('prefix', os.path.basename(path))
                    excludes = xentry.get('excludes', '').split(';')
                    # NOTE(review): ''.split(';') == [''] is truthy, so this
                    # branch always appends -- confirm that is intended.
                    if excludes:
                        data.append(('tree', (path, prefix, excludes)))
                else:
                    for xitem in xentry:
                        data.append((xentry.tag, xitem.attrs))
            self.setExecutableData(data)
        # load hidden imports
        xhiddenimports = xexe.find('hiddenimports')
        if xhiddenimports is not None:
            imports = []
            for ximport in xhiddenimports:
                imports.append(ximport.text)
            self.setHiddenImports(imports)
        # load options
        xopts = xexe.find('options')
        if xopts is not None:
            for xopt in xopts:
                if xopt.text.startswith('.'):
                    value = mkpath(xopt.text)
                else:
                    value = xopt.text
                self._executableOptions[xopt.tag] = value
    # lookup installer options
    xinstall = xdata.find('installer')
    if xinstall is not None:
        install_tags = {'name': '_installName'}
        for tag, prop in install_tags.items():
            xtag = xinstall.find(tag)
            if xtag is not None:
                value = xtag.text
                if value.startswith('.'):
                    value = mkpath(value)
                setattr(self, prop, value)
        xopts = xinstall.find('options')
        if xopts is not None:
            for xopt in xopts:
                if xopt.text.startswith('.'):
                    value = mkpath(xopt.text)
                else:
                    value = xopt.text
                self._installerOptions[xopt.tag] = value
        xdirectories = xinstall.find('additional_directories')
        if xdirectories is not None:
            for xdir in xdirectories:
                self._installDirectories[xdir.get('path')] = xdir.get('source', '')
|
def _handle_human_connection_event(self, event: events.HumanConnectionEvent,) -> None:
    """Fan a human-connection event out to all registered subscribers.

    Not thread-safe.

    :param event: the event forwarded to every subscriber callback
    """
    for subscriber in self._human_connection_subscribers:
        try:
            subscriber(event)
        except Exception:
            # One failing subscriber must not block delivery to the rest;
            # log with traceback and continue.
            LOG.exception(self._prefix_log_message(f"failed to send human connection event {event} to " f"subscriber {subscriber}"))
|
def quantize(self, value):
    """Quantize the decimal value to the configured precision.

    Uses a copy of the current decimal context so the global context's
    precision is never mutated.
    """
    ctx = decimal.getcontext().copy()
    ctx.prec = self.max_digits
    exponent = decimal.Decimal('.1') ** self.decimal_places
    return value.quantize(exponent, context=ctx)
|
def _set_hierarchy_view(self, session):
    """Sets the underlying hierarchy view to match current view."""
    method_name = ('use_federated_hierarchy_view'
                   if self._hierarchy_view == FEDERATED
                   else 'use_isolated_hierarchy_view')
    try:
        getattr(session, method_name)()
    except AttributeError:
        # Session does not support switching views; leave it unchanged.
        pass
|
def set_share_properties(self, share_name, quota, timeout=None):
    '''Sets service-defined properties for the specified share.

    :param str share_name:
        Name of existing share.
    :param int quota:
        Specifies the maximum size of the share, in gigabytes. Must be
        greater than 0, and less than or equal to 5 TB (5120 GB).
    :param int timeout:
        The timeout parameter is expressed in seconds.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('quota', quota)
    # Build the PUT request against the share's properties endpoint.
    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(share_name)
    request.query = [
        ('restype', 'share'),
        ('comp', 'properties'),
        ('timeout', _int_to_str(timeout)),
    ]
    request.headers = [('x-ms-share-quota', _int_to_str(quota))]
    self._perform_request(request)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.