signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _responses(self, request):
    """Internal API: yield ``(index, response)`` for every recorded
    interaction whose stored request matches ``request`` under the
    configured match criteria (``self._match_on``)."""
    normalized = self._before_record_request(request)
    for position, interaction in enumerate(self.data):
        recorded_request, recorded_response = interaction
        if requests_match(normalized, recorded_request, self._match_on):
            yield position, recorded_response
|
def extract_text(fpath):
    """Extract structured text content from a plain-text file at ``fpath``.

    Parameters
    ----------
    fpath : str
        Path to the text file.

    Returns
    -------
    :class:`.StructuredFeature`
        A :class:`.StructuredFeature` that contains sentence context.
    """
    # BUG FIX: the original opened the file in text mode, which breaks both
    # chardet.detect() (it expects bytes) and .decode() (absent on Python 3
    # str). Read raw bytes, detect the encoding, then decode.
    with open(fpath, 'rb') as f:
        raw = f.read()
    # chardet returns encoding=None for empty/undetectable input; fall back
    # to UTF-8 instead of crashing in decode().
    encoding = chardet.detect(raw)['encoding'] or 'utf-8'
    document = raw.decode(encoding)

    tokens = []
    sentences = []  # index into ``tokens`` where each sentence starts
    i = 0
    for sentence in nltk.tokenize.sent_tokenize(document):
        sentences.append(i)
        for word in nltk.tokenize.word_tokenize(sentence):
            tokens.append(word)
            i += 1
    contexts = [('sentence', sentences)]
    return StructuredFeature(tokens, contexts)
|
def add_jpeg_decoding(module_spec):
    """Build graph operations that decode and resize a JPEG image.

    Args:
      module_spec: The hub.ModuleSpec for the image module being used.

    Returns:
      Tuple of (placeholder tensor to feed JPEG bytes into, tensor holding
      the resized float image produced by the preprocessing steps).
    """
    height, width = hub.get_expected_image_size(module_spec)
    channels = hub.get_num_image_channels(module_spec)
    jpeg_data = tf.placeholder(tf.string, name='DecodeJPGInput')
    decoded = tf.image.decode_jpeg(jpeg_data, channels=channels)
    # uint8 [0, 255] -> float32 [0, 1]
    as_float = tf.image.convert_image_dtype(decoded, tf.float32)
    # Add a batch dimension, then resize to the module's expected size.
    batched = tf.expand_dims(as_float, 0)
    target_size = tf.cast(tf.stack([height, width]), dtype=tf.int32)
    resized_image = tf.image.resize_bilinear(batched, target_size)
    return jpeg_data, resized_image
|
def encode_example(self, bbox):
    """See base class for details."""

    def _invalid(reason):
        # All validation failures share a common message prefix.
        raise ValueError('BBox coordinates should {} Got {}.'.format(reason, bbox))

    # Every coordinate must be a float already normalized to [0, 1].
    for value in bbox:
        if not isinstance(value, float):
            _invalid('be float.')
        if not 0.0 <= value <= 1.0:
            _invalid('be between 0 and 1.')
    # Reject degenerate boxes where a max edge precedes its min edge.
    if bbox.xmax < bbox.xmin or bbox.ymax < bbox.ymin:
        _invalid('have min <= max.')
    return super(BBoxFeature, self).encode_example([bbox.ymin, bbox.xmin, bbox.ymax, bbox.xmax])
|
def has_common(self, other):
    """Return the set of terms shared by this word set and ``other``.

    Raises ValueError when ``other`` is not a WordSet.
    """
    if not isinstance(other, WordSet):
        raise ValueError('Can compare only WordSets')
    return self.term_set.intersection(other.term_set)
|
def p_expression_div(self, p):
    'expression : expression DIVIDE expression'
    # NOTE: the docstring above is the PLY grammar rule for this production;
    # yacc parses it to build the parser tables, so it must not be reworded.
    # p[1] and p[3] are the already-reduced operand sub-expressions; build a
    # Divide AST node tagged with the source line of the left operand.
    p[0] = Divide(p[1], p[3], lineno=p.lineno(1))
    # Propagate that line number onto the production result (slot 0).
    p.set_lineno(0, p.lineno(1))
|
def _add_resources(data, runtime):
    """Merge input resources with current CWL runtime parameters."""
    if "config" not in data:
        data["config"] = {}
    # Input resources may arrive as a JSON-encoded string; normalize to dict.
    resources = data.get("resources", {}) or {}
    if isinstance(resources, six.string_types) and resources.startswith(("{", "[")):
        resources = json.loads(resources)
        data["resources"] = resources
    assert isinstance(resources, dict), (resources, data)
    data["config"]["resources"] = resources
    # Derive per-core memory from CWL's total RAM allotment.
    cores = int(runtime["cores"])
    memory = int(float(runtime["ram"]) / float(runtime["cores"]))
    default_resources = {
        "cores": cores,
        "memory": "%sM" % memory,
        "jvm_opts": ["-Xms%sm" % min(1000, memory // 2), "-Xmx%sm" % memory],
    }
    data["config"]["resources"].update({"default": default_resources})
    data["config"]["algorithm"]["num_cores"] = cores
    return data
|
def add_variation(self, order=1, first_order=None, first_order_2=None, testparticle=-1):
    """Add a set of variational particles to the simulation.

    If there are N real particles in the simulation, this function adds N
    additional variational particles. To see how many particles (real and
    variational) are in a simulation, use ``sim.N``. To see how many
    variational particles are in a simulation use ``sim.N_var``.

    Currently Leapfrog, WHFast and IAS15 support first order variational
    equations. IAS15 also supports second order variational equations.

    Parameters
    ----------
    order : integer, optional
        By default the function adds a set of first order variational
        particles. Set this flag to 2 for second order.
    first_order : Variation, optional
        Second order variational equations depend on their corresponding
        first order variational equations. This parameter expects the
        Variation object corresponding to the first order equations.
    first_order_2 : Variation, optional
        Same as first_order, but allows setting two different indices to
        calculate off-diagonal elements. If omitted, first_order is used
        for both first order equations.
    testparticle : int, optional
        If set to a value >= 0, then only one variational particle will be
        added and be treated as a test particle.

    Returns
    -------
    Variation
        A copy -- you can only modify it through its particles property or
        vary method.
    """
    # Snapshot the config index *before* the C call fills the next slot.
    cur_var_config_N = self.var_config_N
    if order == 1:
        index = clibrebound.reb_add_var_1st_order(byref(self), c_int(testparticle))
    elif order == 2:
        # Second order equations require their first order counterparts.
        if first_order is None:
            raise AttributeError("Please specify corresponding first order variational equations when initializing second order variational equations.")
        if first_order_2 is None:
            first_order_2 = first_order
        index = clibrebound.reb_add_var_2nd_order(byref(self), c_int(testparticle), c_int(first_order.index), c_int(first_order_2.index))
    else:
        raise AttributeError("Only variational equations of first and second order are supported.")
    # NOTE(review): ``index`` is assigned but never used; presumably kept for
    # its side effect on the C library state -- confirm before removing.
    # Need a copy because the location of the original might shift if more
    # variations are added.
    s = Variation.from_buffer_copy(self.var_config[cur_var_config_N])
    return s
|
def set_queue_acl(self, queue_name, signed_identifiers=None, timeout=None):
    '''Set stored access policies for the queue for use with Shared
    Access Signatures.

    Setting permissions replaces the existing permissions entirely. To
    update them, first fetch all access policies with :func:`~get_queue_acl`,
    modify what you need, then call this function with the complete set.

    A newly established stored access policy may take up to 30 seconds to
    take effect; until then, shared access signatures associated with it
    raise :class:`AzureHttpError`.

    :param str queue_name:
        The name of an existing queue.
    :param signed_identifiers:
        A dictionary of access policies to associate with the queue. The
        dictionary may contain up to 5 elements. An empty dictionary
        will clear the access policies set on the service.
    :type signed_identifiers: dict(str, :class:`~azure.storage.common.models.AccessPolicy`)
    :param int timeout:
        The server timeout, expressed in seconds.
    '''
    _validate_not_none('queue_name', queue_name)
    _validate_access_policies(signed_identifiers)

    # Build the PUT <queue>?comp=acl request carrying the policies as XML.
    request = HTTPRequest()
    request.method = 'PUT'
    request.host_locations = self._get_host_locations()
    request.path = _get_path(queue_name)
    request.query = {'comp': 'acl', 'timeout': _int_to_str(timeout)}
    acl_xml = _convert_signed_identifiers_to_xml(signed_identifiers)
    request.body = _get_request_body(acl_xml)
    self._perform_request(request)
|
def deallocate_network_ipv4(self, id_network_ipv4):
    """Deallocate all relationships of a NetworkIPv4.

    :param id_network_ipv4: ID for NetworkIPv4
    :return: Nothing
    :raise InvalidParameterError: Invalid ID for NetworkIPv4.
    :raise NetworkIPv4NotFoundError: NetworkIPv4 not found.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    if not is_valid_int_param(id_network_ipv4):
        raise InvalidParameterError(u'The identifier of NetworkIPv4 is invalid or was not informed.')
    url = 'network/ipv4/%s/deallocate/' % id_network_ipv4
    code, xml = self.submit(None, 'DELETE', url)
    return self.response(code, xml)
|
def read_cyc(this, fn, conv=1.0):
    """Read the lattice information from a cyc.dat file (i.e., tblmd input file).

    Parameters
    ----------
    this : object
        Atoms-like object providing ``set_cell``/``set_pbc`` -- presumably an
        ase.Atoms; confirm against callers.
    fn : str
        Path of the cyc.dat file to read.
    conv : float, optional
        Scale factor applied to each cell component.
    """
    f = paropen(fn, "r")
    try:
        # Skip the four header lines.
        for _ in range(4):
            f.readline()
        cell = np.zeros((3, 3))
        # The next three lines each hold one cell vector; line j becomes
        # *column* j of the cell matrix, scaled by ``conv``.
        # BUG FIX: the original did ``s = map(float, ...)`` then ``s[0]`` --
        # a map object is not subscriptable on Python 3; materialize a list.
        for col in range(3):
            values = [float(x) for x in f.readline().split()]
            for row in range(3):
                cell[row, col] = values[row] * conv
    finally:
        # Original leaked the file handle on a parse error.
        f.close()
    this.set_cell(cell)
    this.set_pbc(True)
|
def follow(resources, **kwargs):
    """Follow publications involved with resources."""
    # Connect, falling back to discovering resources from the server.
    client = redis.Redis(decode_responses=True, **kwargs)
    resources = resources or find_resources(client)
    channels = [Keys.EXTERNAL.format(resource) for resource in resources]
    if resources:
        subscription = Subscription(client, *channels)
    # Print message payloads until interrupted with Ctrl-C.
    while resources:
        try:
            message = subscription.listen()
            if message['type'] == 'message':
                print(message['data'])
        except KeyboardInterrupt:
            break
|
def file_exists(self, filename, directory=False, note=None, loglevel=logging.DEBUG):
    """Return True if file exists on the target host, else False

    @param filename:  Filename to determine the existence of.
    @param directory: Indicate that the file is a directory.
    @param note:      See send()

    @type filename:  string
    @type directory: boolean
    @rtype:          boolean
    """
    shutit = self.shutit
    shutit.handle_note(note, 'Looking for filename in current environment: ' + filename)
    # -d: must be a directory; -e: plain existence (directory is None);
    # -a: existence test used otherwise (directory explicitly False).
    test_type = '-d' if directory is True else '-e' if directory is None else '-a'
    # v the space is intentional, to avoid polluting bash history.
    test = ' test %s %s' % (test_type, filename)
    # The FILEXIST/FILNEXIST markers are split with "" so the echoed command
    # itself can never match the pattern we search for in the output.
    output = self.send_and_get_output(test + ' && echo FILEXIST-""FILFIN || echo FILNEXIST-""FILFIN', record_command=False, echo=False, loglevel=loglevel)
    res = shutit.match_string(output, '^(FILEXIST|FILNEXIST)-FILFIN$')
    ret = False
    if res == 'FILEXIST':
        ret = True
    elif res == 'FILNEXIST':
        pass
    else:  # pragma: no cover
        # Neither marker seen: dump the pexpect buffers for debugging, then fail.
        shutit.log(repr('before>>>>:%s<<<< after:>>>>%s<<<<' % (self.pexpect_child.before, self.pexpect_child.after)), transient=True, level=logging.INFO)
        shutit.fail('Did not see FIL(N)?EXIST in output:\n' + output)
    shutit.handle_note_after(note=note)
    return ret
|
def match_start_date(self, start, end, match):
    """Match temporals whose start date falls between the given dates, inclusive.

    arg:    start (osid.calendaring.DateTime): start of date range
    arg:    end (osid.calendaring.DateTime): end of date range
    arg:    match (boolean): ``true`` for a positive match, ``false``
            for a negative match
    raise:  InvalidArgument - ``end`` is less than ``start``, or
            ``match`` is ``false`` (not currently supported)
    raise:  NullArgument - ``start`` or ``end`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Guard clauses: negative matching is unsupported, and the range must
    # be well-ordered before we touch the query terms.
    if not match:
        raise errors.InvalidArgument('match = False not currently supported')
    if end < start:
        raise errors.InvalidArgument('end date must be >= start date when match = True')
    self._query_terms['startDate'] = {'$gte': start, '$lte': end}
|
def create_widget(self):
    """Instantiate the underlying Android EditText widget."""
    declaration = self.declaration
    # Fall back to the platform default style when none was declared.
    style = declaration.style or "@attr/editTextStyle"
    self.widget = EditText(self.get_context(), None, style)
|
def kmer_lca_records(seqs_path, one_codex_api_key: 'One Codex API key' = None, fastq: 'input is fastq; disable autodetection' = False, progress: 'show progress bar (sent to stderr)' = False):
    '''Parallel lowest common ancestor sequence classification of fasta/q using the One Codex API.
    Returns Biopython SeqRecords with tictax annotations as the `description` attribute.
    LCAs are assigned using an LCA index of 31mers from the One Codex database.'''
    records = parse_seqs(seqs_path, fastq)
    # Fall back to the key stored in the tictax config file.
    if not one_codex_api_key:
        one_codex_api_key = config()['one_codex_api_key']
    print('Classifying sequences…', file=sys.stderr)
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(oc_classify(records, one_codex_api_key, progress, False))
|
async def set_message(self, text=None, reply_to=0, parse_mode=(), link_preview=None):
    """Change the draft message on the Telegram servers; on success the
    change is mirrored in this object.

    :param str text: New text of the draft. Preserved if left as None.
    :param int reply_to: Message ID to reply to.
        Preserved if left as 0, erased if set to None.
    :param str parse_mode: The parse mode to be used for the text.
    :param bool link_preview: Whether to attach a web page preview.
        Preserved if left as None.
    :return bool: ``True`` on success.
    """
    # Sentinel handling: None text / 0 reply_to / None link_preview all
    # mean "keep the current value".
    text = self._text if text is None else text
    reply_to = self.reply_to_msg_id if reply_to == 0 else reply_to
    link_preview = self.link_preview if link_preview is None else link_preview

    raw_text, entities = await self._client._parse_message_text(text, parse_mode)
    result = await self._client(SaveDraftRequest(
        peer=self._peer,
        message=raw_text,
        no_webpage=not link_preview,
        reply_to_msg_id=reply_to,
        entities=entities,
    ))
    if result:
        # Server accepted the draft: sync our local view of it.
        self._text = text
        self._raw_text = raw_text
        self.link_preview = link_preview
        self.reply_to_msg_id = reply_to
        self.date = datetime.datetime.now(tz=datetime.timezone.utc)
    return result
|
def build_headermap(headers):
    """Construct dictionary {header_file: set_of_included_files}.

    This operates on the "real" set of includes: each header file is parsed
    to check which files are actually included from there.
    """
    # TODO: what happens if some headers are circularly dependent?
    headermap = {hfile: None for hfile in headers}
    for hfile in headers:
        assert hfile.startswith(("c/", "datatable/include/"))
        included = find_includes(hfile)
        for dep in included:
            assert dep != hfile, "File %s includes itself?" % dep
            assert dep.startswith("c/")
            # Every include must itself be a known header.
            if dep not in headers:
                raise ValueError("Unknown header \"%s\" included from %s" % (dep, hfile))
        headermap[hfile] = set(included)
    return headermap
|
def make(self, host="localhost", port=8082, protocol="http", base_uri="", os_auth_type="http", **kwargs):
    """Initialize a session to the Contrail API server.

    :param os_auth_type: auth plugin to use:
        - http: basic HTTP authentication
        - v2password: keystone v2 auth
        - v3password: keystone v3 auth
    :type os_auth_type: str
    """
    loader = loading.base.get_plugin_loader(os_auth_type)
    # Pop every "os_<opt>" keyword that the chosen plugin understands;
    # whatever remains in kwargs is forwarded as argparse-style arguments.
    plugin_options = {}
    for opt in loader.get_options():
        key = 'os_%s' % opt.dest
        if key in kwargs:
            plugin_options[opt.dest] = kwargs.pop(key)
    plugin = loader.load_from_options(**plugin_options)
    return self.load_from_argparse_arguments(
        Namespace(**kwargs), host=host, port=port, protocol=protocol,
        base_uri=base_uri, auth=plugin)
|
def get_client(self, client_type):
    """Return the cached client for ``client_type``, creating it on first use."""
    try:
        return self._client_cache[client_type]
    except KeyError:
        # Cache miss: resolve the class, instantiate, and memoize.
        client_class = self._get_class(client_type)
        instance = self._get_client_instance(client_class)
        self._client_cache[client_type] = instance
        return instance
|
def recursive_directory_listing(log, baseFolderPath, whatToList="all"):
    """*List directory contents recursively.*

    Options to list only files or only directories.

    **Key Arguments:**
        - ``log`` -- logger
        - ``baseFolderPath`` -- path to the base folder whose files and
          folders are listed recursively
        - ``whatToList`` -- list files only, directories only or all
          ["files" | "dirs" | "all"]

    **Return:**
        - ``matchedPathList`` -- the matched paths
    """
    log.debug('starting the ``recursive_directory_listing`` function')
    matchedPathList = []
    parentDirectoryList = [baseFolderPath]
    # Breadth-first walk: each pass expands one directory level. (The
    # original also wrapped this in a ``while os.listdir(...) and count < 20``
    # loop that re-ran up to 20 times with no observable effect; removed.)
    while parentDirectoryList:
        childDirList = []
        for parentDir in parentDirectoryList:
            try:
                thisDirList = os.listdir(parentDir)
            except Exception as e:
                # BUG FIX: original used py2-only ``except Exception, e``
                # syntax, a SyntaxError on Python 3.
                log.error(e)
                continue
            for entry in thisDirList:
                fullPath = os.path.join(parentDir, entry)
                # BUG FIX: the original compared strings with ``is``, which
                # only worked by CPython interning accident; use ``==``.
                if whatToList == "all":
                    matched = True
                elif whatToList == "dirs":
                    matched = os.path.isdir(fullPath)
                elif whatToList == "files":
                    matched = os.path.isfile(fullPath)
                else:
                    log.error('cound not list files in %s, `whatToList` variable incorrect: [ "files" | "dirs" | "all" ]' % (baseFolderPath,))
                    sys.exit(0)
                if matched:
                    matchedPathList.append(fullPath)
                # Queue sub-directories for the next level of the walk.
                if os.path.isdir(fullPath):
                    childDirList.append(fullPath)
        parentDirectoryList = childDirList
    log.debug('completed the ``recursive_directory_listing`` function')
    return matchedPathList
|
def update(did):
    """Update DDO of an existing asset
    tags:
      - ddo
    consumes:
      - application/json
    parameters:
      - in: body
        name: body
        required: true
        description: DDO of the asset.
        schema:
          type: object
          required:
            - "@context"
            - created
            - id
            - publicKey
            - authentication
            - proof
            - service
          properties:
            "@context":
              description:
              example: https://w3id.org/future-method/v1
              type: string
            id:
              description: ID of the asset.
              example: did:op:123456789abcdefghi
              type: string
            created:
              description: date of ddo creation.
              example: "2016-02-08T16:02:20Z"
              type: string
            publicKey:
              type: array
              description: List of public keys.
              example: [{"id": "did:op:123456789abcdefghi#keys-1"},
                        {"type": "Ed25519VerificationKey2018"},
                        {"owner": "did:op:123456789abcdefghi"},
                        {"publicKeyBase58": "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV"}]
            authentication:
              type: array
              description: List of authentication mechanisms.
              example: [{"type": "RsaSignatureAuthentication2018"},
                        {"publicKey": "did:op:123456789abcdefghi#keys-1"}]
            proof:
              type: dictionary
              description: Information about the creation and creator of the asset.
              example: {"type": "UUIDSignature",
                        "created": "2016-02-08T16:02:20Z",
                        "creator": "did:example:8uQhQMGzWxR8vw5P3UWH1ja",
                        "signatureValue": "QNB13Y7Q9...1tzjn4w=="}
            service:
              type: array
              description: List of services, e.g. Access, Compute and a
                Metadata service whose "metadata" payload carries "base"
                (name, type, description, dateCreated, author, license,
                copyrightHolder, compression, workExample, files,
                encryptedFiles, links, inLanguage, tags, price, checksum),
                "curation" (rating, numVotes, schema) and
                "additionalInformation" (updateFrecuency, structuredMarkup)
                sections.
              example: [{"type": "Access",
                         "serviceEndpoint": "http://mybrizo.org/api/v1/brizo/services/consume?pubKey=${pubKey}&serviceId={serviceId}&url={url}"},
                        {"type": "Compute",
                         "serviceEndpoint": "http://mybrizo.org/api/v1/brizo/services/compute?pubKey=${pubKey}&serviceId={serviceId}&algo={algo}&container={container}"},
                        {"type": "Metadata",
                         "serviceDefinitionId": "2",
                         "serviceEndpoint": "http://myaquarius.org/api/v1/provider/assets/metadata/{did}",
                         "metadata": {"base": {"name": "UK Weather information 2011",
                                               "type": "dataset"},
                                      "curation": {"rating": 0.93,
                                                   "numVotes": 123,
                                                   "schema": "Binary Voting"},
                                      "additionalInformation": {"updateFrecuency": "yearly"}}}]
    responses:
      200:
        description: Asset successfully updated.
      201:
        description: Asset successfully registered.
      400:
        description: One of the required attributes is missing.
      404:
        description: Invalid asset data.
      500:
        description: Error
    """
    # Required attribute sets for the DDO itself and for the base/curation
    # sections of the Metadata service payload.
    required_attributes = ['@context', 'created', 'id', 'publicKey', 'authentication', 'proof', 'service']
    required_metadata_base_attributes = ['name', 'dateCreated', 'author', 'license', 'price', 'encryptedFiles', 'type', 'checksum']
    required_metadata_curation_attributes = ['rating', 'numVotes']
    # NOTE(review): ``assert`` is stripped under ``python -O``; relies on the
    # payload always being a JSON object -- confirm upstream enforcement.
    assert isinstance(request.json, dict), 'invalid payload format.'
    data = request.json
    if not data:
        logger.error(f'request body seems empty, expecting {required_attributes}')
        return 400
    # Validate the top-level DDO attributes first.
    msg, status = check_required_attributes(required_attributes, data, 'update')
    if msg:
        return msg, status
    # Then the metadata "base" section of the Metadata service ...
    msg, status = check_required_attributes(required_metadata_base_attributes, _get_base_metadata(data['service']), 'update')
    if msg:
        return msg, status
    # ... and the metadata "curation" section.
    msg, status = check_required_attributes(required_metadata_curation_attributes, _get_curation_metadata(data['service']), 'update')
    if msg:
        return msg, status
    # Plain-text URLs must not leak into the files metadata.
    msg, status = check_no_urls_in_files(_get_base_metadata(data['service']), 'register')
    if msg:
        return msg, status
    msg, status = validate_date_format(data['created'])
    if msg:
        return msg, status
    _record = dict()
    _record = copy.deepcopy(data)
    # Store 'created' as a real datetime, not the ISO string from the payload.
    _record['created'] = datetime.strptime(data['created'], '%Y-%m-%dT%H:%M:%SZ')
    try:
        if dao.get(did) is None:
            # Unknown DID: fall through to registration and report 201.
            register()
            return _sanitize_record(_record), 201
        else:
            # Existing DID: preserve the original publish date on the
            # Metadata service before overwriting the stored record.
            for service in _record['service']:
                service_id = int(service['serviceDefinitionId'])
                if service['type'] == 'Metadata':
                    _record['service'][service_id]['metadata']['base']['datePublished'] = _get_date(dao.get(did)['service'])
            dao.update(_record, did)
            return Response(_sanitize_record(_record), 200, content_type='application/json')
    except Exception as err:
        return f'Some error: {str(err)}', 500
|
def _center_window(self, result, window):
    """Shift ``result`` so the rolling window is centered along ``self.axis``."""
    ndim = result.ndim
    if self.axis > ndim - 1:
        # Error message kept byte-identical to the original.
        raise ValueError("Requested axis is larger then no. of argument "
                         "dimensions")
    offset = _offset(window, True)
    if offset <= 0:
        return result
    if isinstance(result, (ABCSeries, ABCDataFrame)):
        return result.slice_shift(-offset, axis=self.axis)
    # ndarray path: drop the leading ``offset`` entries along the axis.
    indexer = [slice(None)] * ndim
    indexer[self.axis] = slice(offset, None)
    return np.copy(result[tuple(indexer)])
|
def _make_builder_configs():
    """Make built-in Librispeech BuilderConfigs.

    Uses 4 text encodings (plain text, bytes, subwords with 8k vocab,
    subwords with 32k vocab) crossed with the data subsets (clean100,
    clean360, all).

    Returns:
      `list<tfds.audio.LibrispeechConfig>`
    """
    text_encoder_configs = [
        None,  # plain text
        tfds.features.text.TextEncoderConfig(
            name="bytes", encoder=tfds.features.text.ByteTextEncoder()),
        tfds.features.text.TextEncoderConfig(
            name="subwords8k",
            encoder_cls=tfds.features.text.SubwordTextEncoder,
            vocab_size=2 ** 13),
        tfds.features.text.TextEncoderConfig(
            name="subwords32k",
            encoder_cls=tfds.features.text.SubwordTextEncoder,
            vocab_size=2 ** 15),
    ]
    version = "0.1.0"
    # Cross every encoding with every data subset.
    return [
        LibrispeechConfig(version=version, text_encoder_config=encoder_config, data=data)
        for encoder_config in text_encoder_configs
        for data in _DATA_OPTIONS
    ]
|
def do_run(self, count=1):
    '''Roll ``count`` dice and store aggregate statistics.

    Computes all stats in one pass, so it may be slower than a specific
    doFoo method, but it is probably faster than running each of those
    separately for the same stats.

    Sets the following properties:
      - stats.bucket
      - stats.sum
      - stats.avr

    :param count: Number of rolls to make.'''
    if not self.roll.summable:
        raise Exception('Roll is not summable')
    # Single pass: accumulate the total and a result histogram together.
    histogram = {}
    total = 0
    for outcome in self.roll.x_rolls(count):
        total += outcome
        histogram[outcome] = histogram.get(outcome, 0) + 1
    self._bucket = histogram
    self.sum = total
    self.avr = total / count
|
def mksls(src, dst=None):
    '''Convert a preseed file to an SLS file

    ``src`` is the path of the preseed file to read; when ``dst`` is given
    the YAML is written there, otherwise the YAML string is returned.
    '''
    ps_opts = {}
    # Parse the preseed file into a nested dict: owner (e.g. 'd-i') ->
    # '/'-separated components of the question name -> {'type', 'argument'}.
    with salt.utils.files.fopen(src, 'r') as fh_:
        for line in fh_:
            line = salt.utils.stringutils.to_unicode(line)
            if line.startswith('#'):
                continue
            if not line.strip():
                continue
            comps = shlex.split(line)
            if comps[0] not in ps_opts.keys():
                ps_opts[comps[0]] = {}
            cmds = comps[1].split('/')
            # Walk/create the nested dicts down to the leaf question.
            pointer = ps_opts[comps[0]]
            for cmd in cmds:
                pointer = pointer.setdefault(cmd, {})
            pointer['type'] = comps[2]
            if len(comps) > 3:
                pointer['argument'] = comps[3]
    sls = {}
    # Set language
    # (This looks like it maps to something else)
    sls[ps_opts['d-i']['languagechooser']['language-name-fb']['argument']] = {'locale': ['system']}
    # Set keyboard
    # (This looks like it maps to something else)
    sls[ps_opts['d-i']['kbd-chooser']['method']['argument']] = {'keyboard': ['system']}
    # Set timezone
    timezone = ps_opts['d-i']['time']['zone']['argument']
    sls[timezone] = {'timezone': ['system']}
    if ps_opts['d-i']['tzconfig']['gmt']['argument'] == 'true':
        sls[timezone]['timezone'].append('utc')
    # Set network (only when the preseed configured netcfg at all)
    if 'netcfg' in ps_opts['d-i'].keys():
        iface = ps_opts['d-i']['netcfg']['choose_interface']['argument']
        sls[iface] = {}
        sls[iface]['enabled'] = True
        if ps_opts['d-i']['netcfg']['confirm_static'] == 'true':
            sls[iface]['proto'] = 'static'
        elif ps_opts['d-i']['netcfg']['disable_dhcp'] == 'false':
            sls[iface]['proto'] = 'dhcp'
        sls[iface]['netmask'] = ps_opts['d-i']['netcfg']['get_netmask']['argument']
        sls[iface]['domain'] = ps_opts['d-i']['netcfg']['get_domain']['argument']
        sls[iface]['gateway'] = ps_opts['d-i']['netcfg']['get_gateway']['argument']
        sls[iface]['hostname'] = ps_opts['d-i']['netcfg']['get_hostname']['argument']
        sls[iface]['ipaddress'] = ps_opts['d-i']['netcfg']['get_ipaddress']['argument']
        sls[iface]['nameservers'] = ps_opts['d-i']['netcfg']['get_nameservers']['argument']
    # Emit YAML to a file when a destination is given, otherwise return it.
    if dst is not None:
        with salt.utils.files.fopen(dst, 'w') as fh_:
            salt.utils.yaml.safe_dump(sls, fh_, default_flow_style=False)
    else:
        return salt.utils.yaml.safe_dump(sls, default_flow_style=False)
|
def get_role_id(self, role_name, mount_point='approle'):
    """GET /auth/<mount_point>/role/<role name>/role-id

    :param role_name: Name of the role to look up.
    :type role_name: str
    :param mount_point: Path the auth backend is mounted on.
    :type mount_point: str
    :return: The role-id of the named role.
    :rtype: str
    """
    url = '/v1/auth/{0}/role/{1}/role-id' . format(mount_point, role_name)
    response = self._adapter.get(url)
    return response.json()['data']['role_id']
|
def add_highlighted_fits(self, evnet):
    """Add a new interpretation to each specimen highlighted in the logger.

    If multiple highlighted interpretations belong to the same specimen,
    only one new interpretation is added for that specimen.

    @param: evnet -> the wx.ButtonEvent that triggered this function
        (NOTE(review): parameter name typo kept -- renaming would change
        the handler's signature)
    """
    # Collect the distinct specimens among the highlighted rows.
    specimens = []
    selected = self.logger.GetNextSelected(-1)
    if selected == -1:
        return
    while selected != -1:
        fit, specimen = self.fit_list[selected]
        if specimen not in specimens:
            specimens.append(specimen)
        selected = self.logger.GetNextSelected(selected)
    # One new fit per distinct specimen, then refresh the UI.
    for specimen in specimens:
        self.add_fit_to_specimen(specimen)
    self.update_editor()
    self.parent.update_selection()
|
def get_objective_bank_hierarchy_design_session(self):
    """Gets the session designing objective bank hierarchies.

    return: (osid.learning.ObjectiveBankHierarchyDesignSession) - an
            ``ObjectiveBankHierarchyDesignSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
            ``supports_objective_bank_hierarchy_design()`` is false
    *compliance: optional -- This method must be implemented if
    ``supports_objective_bank_hierarchy_design()`` is true.*
    """
    # The capability check gates access, per the OSID compliance contract.
    if not self.supports_objective_bank_hierarchy_design():
        raise errors.Unimplemented()
    # pylint: disable=no-member
    return sessions.ObjectiveBankHierarchyDesignSession(runtime=self._runtime)
|
def _parse_uri(uri_as_string):
    """Parse the given URI string into its smart_open representation.

    Supported URI schemes: file, hdfs, http, https, s3, s3a, s3n, s3u,
    webhdfs and ssh/scp/sftp. s3, s3a and s3n are treated the same way;
    s3u is s3 without SSL. Scheme-less strings are treated as local paths.

    Valid URI examples::

      * s3://my_bucket/my_key
      * s3://my_key:my_secret@my_bucket/my_key
      * hdfs:///path/file
      * webhdfs://host:port/path/file
      * ./local/path/file.gz
      * file:///home/user/file.bz2
      * [ssh|scp|sftp]://username@host/path/file
    """
    if os.name == 'nt' and '://' not in uri_as_string:
        # urlsplit doesn't work on Windows -- it parses the drive letter as
        # the scheme -- so treat scheme-less strings as local files up front.
        uri_as_string = 'file://' + uri_as_string
    parsed_uri = _my_urlsplit(uri_as_string)
    scheme = parsed_uri.scheme

    if scheme == "hdfs":
        return _parse_uri_hdfs(parsed_uri)
    if scheme == "webhdfs":
        return _parse_uri_webhdfs(parsed_uri)
    if scheme in smart_open_s3.SUPPORTED_SCHEMES:
        return _parse_uri_s3x(parsed_uri)
    if scheme == 'file':
        return _parse_uri_file(parsed_uri.netloc + parsed_uri.path)
    if scheme in ('', None):
        return _parse_uri_file(uri_as_string)
    # Checked after the specific schemes above so e.g. "hdfs" wins.
    if scheme.startswith('http'):
        return Uri(scheme=scheme, uri_path=uri_as_string)
    if scheme in smart_open_ssh.SCHEMES:
        return _parse_uri_ssh(parsed_uri)
    raise NotImplementedError("unknown URI scheme %r in %r" % (scheme, uri_as_string))
|
def init(device_id=None, random_seed=None):
    """Initialize Hebel.

    Creates a CUDA context, a CUBLAS context, and initializes and seeds
    the pseudo-random number generator.

    **Parameters:**

    device_id : integer, optional
        The ID of the GPU device to use. If this is omitted, the value is
        read from the environment variable ``CUDA_DEVICE`` (or the file
        ``.cuda-device`` handled by PyCUDA); otherwise PyCUDA's default
        context is used, which picks the fastest available device.

    random_seed : integer, optional
        The seed to use for the pseudo-random number generator. If this
        is omitted, the seed is taken from the environment variable
        ``RANDOM_SEED`` and if that is not defined, a random integer is
        used as a seed.
    """
    # Bug fix: the device fallback was previously assigned to
    # `random_seed`, so CUDA_DEVICE was never honoured and RANDOM_SEED
    # was only consulted when device_id happened to be omitted.
    if device_id is None:
        device_id = _os.environ.get('CUDA_DEVICE')
    if random_seed is None:
        random_seed = _os.environ.get('RANDOM_SEED')
    global is_initialized
    if not is_initialized:
        is_initialized = True
        global context
        context.init_context(device_id)
        from pycuda import gpuarray, driver, curandom
        # Initialize memory pool
        global memory_pool
        memory_pool.init()
        # Initialize pseudo-random number generator
        global sampler
        sampler.set_seed(random_seed)
        # Initialize pycuda_ops
        from hebel import pycuda_ops
        pycuda_ops.init()
|
def transform(self, translation, theta, method='opencv'):
    """Create a new image by translating and rotating the current image.

    Parameters
    ----------
    translation : :obj:`numpy.ndarray` of float
        The XY translation vector.
    theta : float
        Rotation angle in radians, with positive meaning counter-clockwise.
    method : :obj:`str`
        Method to use for image transformations (opencv or scipy).

    Returns
    -------
    :obj:`Image`
        An image of the same type that has been rotated and translated.
    """
    # Apply the identical rigid transform to each channel independently,
    # then recombine into a single RGB-D image.
    transformed_color = self.color.transform(translation, theta, method=method)
    transformed_depth = self.depth.transform(translation, theta, method=method)
    return RgbdImage.from_color_and_depth(transformed_color, transformed_depth)
|
def gen_signature(priv_path, pub_path, sign_path, passphrase=None):
    '''Create a signature for the public key at ``pub_path`` with the
    private key at ``priv_path`` and write it to ``sign_path``.

    Returns ``True`` on success, ``False`` if ``sign_path`` already
    exists (an existing signature is never overwritten).
    '''
    # Previously this existence check was duplicated: an early
    # `return False` made the informative log message unreachable.
    if os.path.isfile(sign_path):
        log.trace('Signature file %s already exists, please remove it first and ' 'try again', sign_path)
        return False
    log.trace('Calculating signature for %s with %s', os.path.basename(pub_path), os.path.basename(priv_path))
    with salt.utils.files.fopen(pub_path) as fp_:
        mpub_64 = fp_.read()
    mpub_sig = sign_message(priv_path, mpub_64, passphrase)
    mpub_sig_64 = binascii.b2a_base64(mpub_sig)
    with salt.utils.files.fopen(sign_path, 'wb+') as sig_f:
        sig_f.write(salt.utils.stringutils.to_bytes(mpub_sig_64))
    log.trace('Wrote signature to %s', sign_path)
    return True
|
def is_valid(self, fast=False):
    """Return validation success or failure as a boolean.

    The optional ``fast`` parameter is passed directly to ``validate()``.
    """
    try:
        self.validate(fast=fast)
    except BagError:
        return False
    else:
        return True
|
def rm_file_or_dir(path, ignore_errors=True):
    """Remove the file, symlink, or directory tree at ``path``.

    Parameters
    ----------
    path : str
        Filesystem path to remove.  A missing path is silently ignored.
    ignore_errors : bool
        Passed through to :func:`shutil.rmtree` when removing a directory.
    """
    # Use lexists() so broken symlinks (for which os.path.exists() is
    # False) are still cleaned up.
    if not os.path.lexists(path):
        return
    if os.path.islink(path):
        # Remove the link itself, never its target.
        os.unlink(path)
    elif os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=ignore_errors)
    else:
        os.remove(path)
|
def file_to_md5(filename, block_size=8192):
    """Calculate the MD5 hash of a file.

    Memory-friendly: the file is read piece by piece rather than loaded
    whole.  See stackoverflow.com/questions/1131220/

    :param filename: filename to convert
    :param block_size: size of block
    :return: MD5 hash of file content
    """
    digest = hashlib.md5()
    with open(filename, 'rb') as stream:
        # iter() with a b'' sentinel stops cleanly at end-of-file.
        for chunk in iter(lambda: stream.read(block_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
|
def heater_level(self, value):
    """Set the heater level after validating it.

    ``value`` must lie between 0 and ``heater_segments`` (inclusive).
    Can only be called when the freshroastsr700 object was initialized
    with ``ext_sw_heater_drive=True``; raises ``RoasterValueError``
    otherwise, or when the value is out of range.
    """
    if not self._ext_sw_heater_drive:
        # Heater drive is managed internally; external writes are invalid.
        raise exceptions.RoasterValueError
    if value not in range(0, self._heater_bangbang_segments + 1):
        raise exceptions.RoasterValueError
    self._heater_level.value = value
|
def _description(self):
    """A concise html explanation of this Action."""
    subject = self.timemachine.presently
    if self.action_type == "dl":
        # A deleted object can no longer render html; fall back to the
        # plain content-type name.
        return "Deleted %s" % subject.content_type.name
    if self.action_type == "cr":
        return "Created %s" % subject._object_type_html()
    return "Modified %s" % subject._object_type_html()
|
def transformToNative(self):
    """Transform this object into a custom VBase subclass.

    Always returns a representation of this object: either ``self``
    (possibly modified in place) or a newly created object.
    """
    # Nothing to do when already native or no native behavior is known.
    if self.isNative or not self.behavior or not self.behavior.hasNative:
        return self
    try:
        return self.behavior.transformToNative(self)
    except Exception as e:
        # Wrap transformation failures in a ParseError, preserving the
        # source line number when it is known.
        lineNumber = getattr(self, 'lineNumber', None)
        if isinstance(e, ParseError):
            if lineNumber is not None:
                e.lineNumber = lineNumber
            raise
        msg = "In transformToNative, unhandled exception on line %s: %s: %s"
        msg = msg % (lineNumber, sys.exc_info()[0], sys.exc_info()[1])
        raise ParseError(msg, lineNumber)
|
def _get_iscsi_settings_resource(self, data):
    """Locate the iSCSI settings resource of the server.

    :param data: Existing iscsi settings of the server.
    :returns: tuple of (headers, iscsi settings url, iscsi settings dict).
    :raises: IloCommandNotSupportedError, if resource is not found.
    :raises: IloError, on an error from iLO.
    """
    try:
        settings_uri = data['links']['Settings']['href']
    except KeyError:
        # The firmware does not expose the settings link at all.
        raise exception.IloCommandNotSupportedError('iscsi settings resource not found.')
    status, headers, settings = self._rest_get(settings_uri)
    if status != 200:
        raise exception.IloError(self._get_extended_error(settings))
    return headers, settings_uri, settings
|
def post(method, hmc, uri, uri_parms, body, logon_required, wait_for_completion):
    """Operation: Add Candidate Adapter Ports to an FCP Storage Group."""
    # Asynchronous invocation is not supported yet.
    assert wait_for_completion is True
    # The URI is a POST operation, so construct the SG URI from its OID.
    sg_uri = '/api/storage-groups/' + uri_parms[0]
    try:
        storage_group = hmc.lookup_by_uri(sg_uri)
    except KeyError:
        raise InvalidResourceError(method, uri)
    check_required_fields(method, uri, body, ['adapter-port-uris'])
    # TODO: Check that storage group has type FCP
    # Reflect the result of adding the candidate ports.
    candidate_uris = storage_group.properties['candidate-adapter-port-uris']
    for ap_uri in body['adapter-port-uris']:
        if ap_uri in candidate_uris:
            raise ConflictError(method, uri, 483, "Adapter port is already in candidate " "list of storage group %s: %s" % (storage_group.name, ap_uri))
        candidate_uris.append(ap_uri)
|
def symlink_path(self):
    # type: () -> bytes
    '''Get the path as a string of the symlink target of this Rock Ridge
    entry (if this is a symlink).

    Parameters:
     None.
    Returns:
     Symlink path as a string.
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('Rock Ridge extension not yet initialized')
    if not self.is_symlink():
        raise pycdlibexception.PyCdlibInvalidInput('Entry is not a symlink!')
    components = []
    current = b''
    # SL records may split one path component across several records; a
    # record flagged "continued" contributes to the same component as the
    # record(s) that follow it.
    for record in self.dr_entries.sl_records + self.ce_entries.sl_records:
        current += record.name()
        if not record.last_component_continued():
            components.append(current)
            current = b''
    if current != b'':
        raise pycdlibexception.PyCdlibInvalidISO('Saw a continued symlink record with no end; ISO is probably malformed')
    return b'/'.join(components)
|
def recipe_status(backend):
    """Compare local recipe to remote recipe for the current recipe."""
    kitchen = DKCloudCommandRunner.which_kitchen_name()
    if kitchen is None:
        raise click.ClickException('You are not in a Kitchen')
    recipe_dir = DKRecipeDisk.find_recipe_root_dir()
    if recipe_dir is None:
        raise click.ClickException('You must be in a Recipe folder')
    recipe_name = DKRecipeDisk.find_recipe_name()
    # Announce what is being compared before kicking off the check.
    banner = "%s - Getting the status of Recipe '%s' in Kitchen '%s'\n\tversus directory '%s'" % (get_datetime(), recipe_name, kitchen, recipe_dir)
    click.secho(banner, fg='green')
    check_and_print(DKCloudCommandRunner.recipe_status(backend.dki, kitchen, recipe_name, recipe_dir))
|
def error(name=None, message=''):
    '''If ``name`` is None then return an empty dict; otherwise raise an
    exception with ``__name__`` from ``name`` and message from ``message``.

    CLI Example:

    .. code-block:: bash

        salt-wheel error
        salt-wheel error.error name="Exception" message="This is an error."
    '''
    if name is None:
        return {}
    salt.utils.error.raise_error(name=name, message=message)
    return {}
|
def extend_schema_spec(self) -> None:
    """Injects the identity field"""
    super().extend_schema_spec()
    identity_field = {
        'Name': '_identity',
        'Type': BtsType.STRING,
        'Value': 'identity',
        ATTRIBUTE_INTERNAL: True,
    }
    if self.ATTRIBUTE_FIELDS not in self._spec:
        return
    # Identity must be the very first field in the schema.
    self._spec[self.ATTRIBUTE_FIELDS].insert(0, identity_field)
    self.schema_loader.add_schema_spec(identity_field, self.fully_qualified_name)
|
def _find_usage_ACLs(self):
    """find usage for ACLs"""
    # Tally network ACLs per VPC while reporting per-ACL rule counts.
    acls_per_vpc = defaultdict(int)
    for acl in self.conn.describe_network_acls()['NetworkAcls']:
        acls_per_vpc[acl['VpcId']] += 1
        # Rules per network ACL
        self.limits['Rules per network ACL']._add_current_usage(
            len(acl['Entries']),
            aws_type='AWS::EC2::NetworkAcl',
            resource_id=acl['NetworkAclId'],
        )
    for vpc_id, acl_count in acls_per_vpc.items():
        self.limits['Network ACLs per VPC']._add_current_usage(
            acl_count,
            aws_type='AWS::EC2::VPC',
            resource_id=vpc_id,
        )
|
def register(self, event_type, callback, args=None, kwargs=None, details_filter=None, weak=False):
    """Register a callback to be called when an event of a given type occurs.

    The callback will be called with the provided ``args`` and ``kwargs``
    when the event type occurs (or on any event if ``event_type`` equals
    :attr:`.ANY`). It also receives an additional keyword argument,
    ``details``, holding the event details provided to :meth:`.notify`
    (if a details filter callback is given then the target callback is
    *only* triggered when the filter returns a truthy value).

    :param event_type: event type to get triggered on
    :param callback: function callback to be registered.
    :param args: non-keyworded arguments
    :type args: list
    :param kwargs: key-value pair arguments
    :type kwargs: dictionary
    :param weak: if the callback retained should be referenced via
                 a weak reference or a strong reference (defaults to
                 holding a strong reference)
    :type weak: bool
    :returns: the listener that was registered
    :rtype: :py:class:`~.Listener`
    """
    # Validate inputs up-front, outside the lock.
    if not six.callable(callback):
        raise ValueError("Event callback must be callable")
    if details_filter is not None and not six.callable(details_filter):
        raise ValueError("Details filter must be callable")
    if not self.can_be_registered(event_type):
        raise ValueError("Disallowed event type '%s' can not have a" " callback registered" % event_type)
    if kwargs:
        for reserved in self.RESERVED_KEYS:
            if reserved in kwargs:
                raise KeyError("Reserved key '%s' not allowed in " "kwargs" % reserved)
    with self._lock:
        # Duplicate-registration check and insertion must be atomic.
        if self.is_registered(event_type, callback, details_filter=details_filter):
            raise ValueError("Event callback already registered with" " equivalent details filter")
        listener = Listener(_make_ref(callback, weak=weak), args=args, kwargs=kwargs, details_filter=details_filter, weak=weak)
        self._topics.setdefault(event_type, []).append(listener)
        return listener
|
def zlib_decompress_to_string(blob):
    """Decompress things to a string in a py2/3 safe fashion

    >>> json_str = '{"test": 1}'
    >>> blob = zlib_compress(json_str)
    >>> got_str = zlib_decompress_to_string(blob)
    >>> got_str == json_str
    True
    """
    if not PY3K:
        return zlib.decompress(blob)
    # On Python 3, accept either bytes or str input and always return str.
    raw = blob if isinstance(blob, bytes) else bytes(blob, 'utf-8')
    return zlib.decompress(raw).decode('utf-8')
|
def republish_module_trigger(plpy, td):
    """Trigger called from postgres database when republishing a module.

    When a module is republished, the versions of the collections that it
    is part of need to be updated (a minor update).

    E.g. collection c1 v2.1 contains module m1 v3; when m1 is updated we
    get a new row in the modules table with m1 v4.  This trigger then
    increments the minor version of c1 (giving c1 v2.2) and creates a
    collection tree for c1 v2.2 identical to v2.1's except that m1 v4
    replaces m1 v3.
    """
    # Legacy publications always supply a version; this trigger only
    # applies to those, so bail out for anything else.
    if td['new']['version'] is None:
        return "OK"
    plpy.log('Trigger fired on %s' % (td['new']['moduleid'],))
    modified = republish_module(td, plpy)
    plpy.log('modified: {}'.format(modified))
    plpy.log('insert values:\n{}\n'.format('\n'.join(['{}: {}'.format(key, value) for key, value in td['new'].items()])))
    return modified
|
def alloc(self):
    """Build the packet header (from _mosquitto_packet_alloc).

    Encodes ``self.remaining_length`` with MQTT's variable-length
    "remaining length" scheme, allocates ``self.payload`` with the
    command byte followed by that encoding, and positions ``self.pos``
    just past the header.

    Returns ``NC.ERR_SUCCESS`` on success, or ``NC.ERR_PAYLOAD_SIZE``
    when the length cannot be encoded in 5 bytes.
    """
    remaining_bytes = bytearray(5)
    remaining_length = self.remaining_length
    self.payload = None
    self.remaining_count = 0
    while True:
        # Bug fix: this used true division (`/ 128`), which produces a
        # float under Python 3 and breaks the bytearray assignment below;
        # divmod performs the intended integer arithmetic.
        remaining_length, byte = divmod(remaining_length, 128)
        if remaining_length > 0:
            # Continuation bit: more length bytes follow.
            byte |= 0x80
        remaining_bytes[self.remaining_count] = byte
        self.remaining_count += 1
        if not (remaining_length > 0 and self.remaining_count < 5):
            break
    if self.remaining_count == 5:
        return NC.ERR_PAYLOAD_SIZE
    self.packet_length = self.remaining_length + 1 + self.remaining_count
    self.payload = bytearray(self.packet_length)
    self.payload[0] = self.command
    for i in range(self.remaining_count):
        self.payload[i + 1] = remaining_bytes[i]
    self.pos = 1 + self.remaining_count
    return NC.ERR_SUCCESS
|
def file_pour(filepath, block_size=10240, *args, **kwargs):
    """Write physical files from entries."""
    def _open_archive(archive_res):
        # Deferred: _pour hands us the archive resource to open.
        _LOGGER.debug("Opening from file (file_pour): %s", filepath)
        _archive_read_open_filename(archive_res, filepath, block_size)
    return _pour(_open_archive, *args, flags=0, **kwargs)
|
def has_attribute(self, name, alias=False):
    """Check if the entity contains the attribute *name*"""
    known = merge_dicts(self.__attributes__, self.__fields__, self.__relations__)
    if alias:
        # Also match the declared alias of any property that has one.
        known.update({prop.alias: prop for prop in known.values() if prop.alias is not None})
    return name in known
|
def apply_host_template(self, host_ids, start_roles):
    """Apply a host template identified by name on the specified hosts and
    optionally start them.

    @param host_ids: List of host ids.
    @param start_roles: Whether to start the created roles or not.
    @return: An ApiCommand object.
    """
    # Delegate to the module-level helper of the same name.
    root = self._get_resource_root()
    cluster_name = self.clusterRef.clusterName
    return apply_host_template(root, self.name, cluster_name, host_ids, start_roles)
|
def process_loaded_configs(self, values):
    """Clean up config values freshly loaded from YAML files.

    Cleanup steps:
      1. remove all value keys that are not uppercase
      2. resolve any keys with missing values

    Note: resolving missing values does not fail fast; all missing values
    are collected and reported to a handler, and only then do we fail
    (unless ``ignore_errors`` is set).

    @param values: dictionary of raw, newly loaded config values
    @return: the processed ``values`` dictionary
    @raises LookupError: when values remain unresolved and errors are not
        ignored
    """
    unresolved = self._process_config_values([], values, [])
    if unresolved:
        msg = "Unresolved values for: {}".format(unresolved)
        # Even though we may fail, there might be a situation when we want
        # to do something with the list of missing values, so pass it to a
        # handler first.
        self.on_process_loaded_configs_failure(values, unresolved)
        if self.ignore_errors:
            # If we're ignoring errors, at least log it.
            # (logging.warn() is a deprecated alias of warning().)
            logging.warning(msg)
        else:
            # End program.
            raise LookupError(msg)
    # All the config values were checked and everything looks good;
    # inform the post handler for any additional work.
    self.on_process_loaded_configs_complete(values)
    return values
|
def enable_all_cpu(self):
    '''Enable all offline cpus'''
    # Flip each offline CPU's sysfs "online" switch to 1.
    for cpu_index in self.__get_ranges("offline"):
        self.__write_cpu_file(path.join("cpu%i" % cpu_index, "online"), b"1")
|
def __get_values(self):
    """Return all values in this cell range as a flat tuple.

    Fetching the whole data array at once is much more effective than
    reading cell values one by one.
    """
    rows = self._get_target().getDataArray()
    flattened = []
    for row in rows:
        flattened.extend(row)
    return tuple(flattened)
|
def getStartdatetime(self):
    """Return the recording's start date and time as a datetime object.

    Examples
    --------
    >>> import pyedflib
    >>> f = pyedflib.data.test_generator()
    >>> f.getStartdatetime()
    datetime.datetime(2011, 4, 4, 12, 57, 2)
    >>> f._close()
    >>> del f
    """
    date_parts = (self.startdate_year, self.startdate_month, self.startdate_day)
    time_parts = (self.starttime_hour, self.starttime_minute, self.starttime_second)
    return datetime(*(date_parts + time_parts))
|
def render(self, filename):
    """Initialize the render, set quality and size video attributes, then
    call the frame-drawing template method defined in the child class."""
    self.elapsed_time = -time()
    dpi = 100
    # 16:9 canvas at 100 dpi.
    fig = figure(figsize=(16, 9), dpi=dpi)
    with self.writer.saving(fig, filename, dpi):
        for frame_id in xrange(self.frames + 1):
            self.renderFrame(frame_id)
            self.writer.grab_frame()
    self.elapsed_time += time()
|
def get_learned_skills(self, lang):
    """Return the learned skill objects sorted by the order they were
    learned in."""
    skills = list(self.user_data.language_data[lang]['skills'])
    # Annotates each skill with its 'dependency_order'.
    self._compute_dependency_order(skills)
    ordered = sorted(skills, key=lambda entry: entry['dependency_order'])
    return [entry for entry in ordered if entry['learned']]
|
def from_blaze(expr, deltas='auto', checkpoints='auto', loader=None, resources=None, odo_kwargs=None, missing_values=None, domain=GENERIC, no_deltas_rule='warn', no_checkpoints_rule='warn'):
    """Create a Pipeline API object from a blaze expression.

    Parameters
    ----------
    expr : Expr
        The blaze expression to use.
    deltas : Expr, 'auto' or None, optional
        The expression to use for the point in time adjustments.
        If the string 'auto' is passed, a deltas expr will be looked up
        by stepping up the expression tree and looking for another field
        with the name of ``expr._name`` + '_deltas'. If None is passed, no
        deltas will be used.
    checkpoints : Expr, 'auto' or None, optional
        The expression to use for the forward fill checkpoints.
        If the string 'auto' is passed, a checkpoints expr will be looked up
        by stepping up the expression tree and looking for another field
        with the name of ``expr._name`` + '_checkpoints'. If None is passed,
        no checkpoints will be used.
    loader : BlazeLoader, optional
        The blaze loader to attach this pipeline dataset to. If None is
        passed, the global blaze loader is used.
    resources : dict or any, optional
        The data to execute the blaze expressions against. This is used
        as the scope for ``bz.compute``.
    odo_kwargs : dict, optional
        The keyword arguments to pass to odo when evaluating the
        expressions.
    domain : zipline.pipeline.domain.Domain
        Domain of the dataset to be created.
    missing_values : dict[str -> any], optional
        A dict mapping column names to missing values for those columns.
        Missing values are required for integral columns.
    no_deltas_rule : {'warn', 'raise', 'ignore'}, optional
        What should happen if ``deltas='auto'`` but no deltas can be found.
        'warn' says to raise a warning but continue.
        'raise' says to raise an exception if no deltas can be found.
        'ignore' says take no action and proceed with no deltas.
    no_checkpoints_rule : {'warn', 'raise', 'ignore'}, optional
        What should happen if ``checkpoints='auto'`` but no checkpoints
        can be found. 'warn' says to raise a warning but continue.
        'raise' says to raise an exception if no deltas can be found.
        'ignore' says take no action and proceed with no deltas.

    Returns
    -------
    pipeline_api_obj : DataSet or BoundColumn
        Either a new dataset or bound column based on the shape of the expr
        passed in. If a table shaped expression is passed, this will return
        a ``DataSet`` that represents the whole table. If an array-like shape
        is passed, a ``BoundColumn`` on the dataset that would be constructed
        from passing the parent is returned.
    """
    if 'auto' in {deltas, checkpoints}:
        # 'auto' metadata lookup only works when the expression tree is made
        # of traversable node types; reject anything else up front.
        invalid_nodes = tuple(filter(is_invalid_deltas_node, expr._subterms()))
        if invalid_nodes:
            raise TypeError('expression with auto %s may only contain (%s) nodes,' " found: %s" % (' or '.join(['deltas'] if deltas is not None else [] + ['checkpoints'] if checkpoints is not None else [],), ', '.join(map(get__name__, valid_deltas_node_types)), ', '.join(set(map(compose(get__name__, type), invalid_nodes),),),),)
    # Resolve 'auto' to concrete exprs (or None) per the configured rules.
    deltas = _get_metadata('deltas', expr, deltas, no_deltas_rule,)
    checkpoints = _get_metadata('checkpoints', expr, checkpoints, no_checkpoints_rule,)
    # Check if this is a single column out of a dataset.
    if bz.ndim(expr) != 1:
        raise TypeError('expression was not tabular or array-like,' ' %s dimensions: %d' % ('too many' if bz.ndim(expr) > 1 else 'not enough', bz.ndim(expr),),)
    single_column = None
    if isscalar(expr.dshape.measure):
        # This is a single column. Record which column we are to return
        # but create the entire dataset.
        single_column = rename = expr._name
        field_hit = False
        if not isinstance(expr, traversable_nodes):
            raise TypeError("expression '%s' was array-like but not a simple field of" " some larger table" % str(expr),)
        while isinstance(expr, traversable_nodes):
            # Walk up toward the enclosing table; stop at the second Field
            # node (the first Field is the selected column itself).
            if isinstance(expr, bz.expr.Field):
                if not field_hit:
                    field_hit = True
                else:
                    break
            rename = expr._name
            expr = expr._child
        dataset_expr = expr.relabel({rename: single_column})
    else:
        dataset_expr = expr
    measure = dataset_expr.dshape.measure
    if not isrecord(measure) or AD_FIELD_NAME not in measure.names:
        raise TypeError("The dataset must be a collection of records with at least an" " '{ad}' field. Fields provided: '{fields}'\nhint: maybe you need" " to use `relabel` to change your field names".format(ad=AD_FIELD_NAME, fields=measure,),)
    _check_datetime_field(AD_FIELD_NAME, measure)
    dataset_expr, deltas, checkpoints = _ensure_timestamp_field(dataset_expr, deltas, checkpoints,)
    # The deltas/checkpoints expressions must mirror the baseline's schema.
    if deltas is not None and (sorted(deltas.dshape.measure.fields) != sorted(measure.fields)):
        raise TypeError('baseline measure != deltas measure:\n%s != %s' % (measure, deltas.dshape.measure,),)
    if (checkpoints is not None and (sorted(checkpoints.dshape.measure.fields) != sorted(measure.fields))):
        raise TypeError('baseline measure != checkpoints measure:\n%s != %s' % (measure, checkpoints.dshape.measure,),)
    # Ensure that we have a data resource to execute the query against.
    _check_resources('expr', dataset_expr, resources)
    _check_resources('deltas', deltas, resources)
    _check_resources('checkpoints', checkpoints, resources)
    # Create or retrieve the Pipeline API dataset.
    if missing_values is None:
        missing_values = {}
    ds = new_dataset(dataset_expr, frozenset(missing_values.items()), domain)
    # Register our new dataset with the loader.
    (loader if loader is not None else global_loader).register_dataset(ds, bind_expression_to_resources(dataset_expr, resources), bind_expression_to_resources(deltas, resources) if deltas is not None else None, bind_expression_to_resources(checkpoints, resources) if checkpoints is not None else None, odo_kwargs=odo_kwargs,)
    if single_column is not None:
        # We were passed a single column, extract and return it.
        return getattr(ds, single_column)
    return ds
|
def _remove_code(site):
    """Delete project files.

    @type site: Site
    """
    def handle_error(function, path, excinfo):
        # shutil.rmtree passes `excinfo` as a sys.exc_info() triple, not an
        # exception object, so the old `excinfo.message` access raised an
        # AttributeError instead of reporting the failure.
        click.secho('Failed to remove path ({em}): {p}'.format(em=excinfo[1], p=path), err=True, fg='red')
    if os.path.exists(site.root):
        shutil.rmtree(site.root, onerror=handle_error)
|
def update_long(self, **kwargs):
    """Update the long optional arguments (those with two leading '-').

    This method updates the long argument name for the specified function
    arguments as stored in :attr:`unfinished_arguments`.

    Parameters
    ----------
    ``**kwargs``
        Keywords must be keys in the :attr:`unfinished_arguments`
        dictionary (i.e. keywords of the root functions), values the long
        argument names.

    Examples
    --------
    Setting::

        >>> parser.update_long(something='s', something_else='se')

    is basically the same as::

        >>> parser.update_arg('something', long='s')
        >>> parser.update_arg('something_else', long='se')

    which in turn is basically comparable to::

        >>> parser.add_argument('--s', dest='something', ...)
        >>> parser.add_argument('--se', dest='something_else', ...)

    See Also
    --------
    update_short, update_longf
    """
    for arg_name, long_name in six.iteritems(kwargs):
        self.update_arg(arg_name, long=long_name)
|
def unicode_compatible(cls):
    """Decorator for unicode compatible classes.

    The decorated class must implement ``__unicode__`` for the decorator
    to work as expected.
    """
    if not PY3:
        # Python 2: str() must yield encoded bytes.
        cls.__str__ = lambda self: self.__unicode__().encode("utf-8")
        return cls
    cls.__str__ = cls.__unicode__
    cls.__bytes__ = lambda self: self.__str__().encode("utf-8")
    return cls
|
def sg_arg_def(**kwargs):
    r"""Defines command line options

    Args:
      **kwargs:
        key: A name for the option.
        value: Default value or a tuple of (default value, description).

    Returns:
      None

    For example,

    # Either of the following two lines will define `--n_epoch` command line argument and set its default value as 1.
    tf.sg_arg_def(n_epoch=1)
    tf.sg_arg_def(n_epoch=(1, 'total number of epochs'))
    """
    # Map the exact python type of the default to the matching tf flag
    # definition helper (type() is exact, so bool is not caught by int).
    definers = {
        str: tf.app.flags.DEFINE_string,
        int: tf.app.flags.DEFINE_integer,
        float: tf.app.flags.DEFINE_float,
        bool: tf.app.flags.DEFINE_bool,
    }
    for name, value in kwargs.items():
        if type(value) is tuple or type(value) is list:
            value, description = value[0], value[1]
        else:
            description = name
        define = definers.get(type(value))
        if define is not None:
            define(name, value, description)
|
def verify_geospatial_bounds(self, ds):
    """Checks that the geospatial bounds is well formed OGC WKT"""
    value = getattr(ds, 'geospatial_bounds', None)
    if value is None:
        # Grouped with Globals.
        return ratable_result(False, "Global Attributes", ["geospatial_bounds not present"])
    try:
        # TODO: verify that WKT is valid given CRS (defaults to EPSG:4326
        # in ACDD).
        from_wkt(ds.geospatial_bounds)
    except AttributeError:
        return ratable_result(False, "Global Attributes", ['Could not parse WKT, possible bad value for WKT'])
    # Parsed OK.
    return ratable_result(True, "Global Attributes", tuple())
|
def get_item_representations(self, features=None):
    """Get the latent representations for items given model and features.

    Arguments
    ---------
    features: np.float32 csr_matrix of shape [n_items, n_item_features], optional
        Each row contains that item's weights over features.
        An identity matrix will be used if not supplied.

    Returns
    -------
    (item_biases, item_embeddings):
        (np.float32 array of shape n_items,
         np.float32 array of shape [n_items, num_components])
        Biases and latent representations for items.
    """
    self._check_initialized()
    if features is None:
        # No feature matrix: items map one-to-one onto their latent rows.
        return self.item_biases, self.item_embeddings
    weights = sp.csr_matrix(features, dtype=CYTHON_DTYPE)
    return weights * self.item_biases, weights * self.item_embeddings
|
def increase_route_count(self, crawled_request):
    """Increase the count that determines how many times a URL of a
    certain route has been crawled.

    Args:
        crawled_request (:class:`nyawc.http.Request`): The request that
            possibly matches a route.
    """
    # Only the first matching route is counted.
    for pattern in self.__routing_options.routes:
        if not re.compile(pattern).match(crawled_request.url):
            continue
        counter_key = str(pattern) + crawled_request.method
        counters = self.__routing_count
        counters[counter_key] = counters.get(counter_key, 0) + 1
        break
|
def get_property(self, name):
    """get_property(property_name: str) -> object

    Retrieve the value of the property called *name*; a TypeError is
    raised for unknown property names.
    """
    try:
        return getattr(self.props, name)
    except AttributeError:
        raise TypeError("Unknown property: %r" % name)
|
def recv_filtered(self, keycheck, tab_key, timeout=30, message=None):
    '''Receive a filtered message, using the callable `keycheck` to filter
    received messages for content.

    `keycheck` takes a single parameter (the decoded response from
    chromium) and returns a boolean: true when the message is the one
    being filtered for, false otherwise.

    This is used internally, for example by `recv()`, to filter the
    response for a specific ID:

        def check_func(message):
            if message_id is None:
                return True
            if "id" in message:
                return message['id'] == message_id
            return False
        return self.recv_filtered(check_func, timeout)

    Note that the function is defined dynamically, and `message_id` is
    captured via closure.
    '''
    self.__check_open_socket(tab_key)
    # A matching message may already be waiting in the backlog.
    backlog = self.messages[tab_key]
    for position, queued in enumerate(backlog):
        if keycheck(queued):
            return backlog.pop(position)
    deadline = time.time() + timeout
    while True:
        received = self.___recv(tab_key)
        if keycheck(received):
            return received
        # Keep non-matching messages for later consumers.
        backlog.append(received)
        if time.time() > deadline:
            if message:
                raise cr_exceptions.ChromeResponseNotReceived("Failed to receive response in recv_filtered() (%s)" % message)
            raise cr_exceptions.ChromeResponseNotReceived("Failed to receive response in recv_filtered()")
        time.sleep(0.005)
|
def refcount(self):
    """Number of references of an article.

    Note: Requires the FULL view of the article.  Returns None when the
    bibliography element is absent.
    """
    bibliography = self.items.find('bibrecord/tail/bibliography', ns)
    if bibliography is None:
        return None
    return bibliography.attrib['refcount']
|
def accumulate(self, buf):
    '''Add more bytes into the running 16-bit CRC (``self.crc``).

    Accepts anything ``bytearray`` can consume: ``bytes``, ``bytearray``,
    ``array.array('B')`` or an iterable of small ints.

    The previous implementation copied the input via
    ``array.array.fromstring``, which was deprecated and then removed in
    Python 3.9; ``bytearray(buf)`` handles the same inputs portably.
    The update step matches the reflected-0x8408 (X.25-style) CRC-16
    used with init 0xFFFF and no final XOR.
    '''
    accum = self.crc
    for octet in bytearray(buf):
        tmp = octet ^ (accum & 0xFF)
        tmp = (tmp ^ (tmp << 4)) & 0xFF
        accum = ((accum >> 8) ^ (tmp << 8) ^ (tmp << 3) ^ (tmp >> 4)) & 0xFFFF
    self.crc = accum
|
def reset(self):
    """Reset widget to original state."""
    # forget the currently loaded recording
    self.filename = None
    self.dataset = None
    # about the recordings
    self.idx_filename.setText('Open Recordings...')
    # blank out every informational label in one pass
    # (recording info first, then visualization info)
    for label in (self.idx_s_freq, self.idx_n_chan, self.idx_start_time,
                  self.idx_end_time, self.idx_scaling, self.idx_distance,
                  self.idx_length, self.idx_start):
        label.setText('')
|
def address(self, is_compressed=None):
    """Return the public address representation of this key, if available."""
    # hash160 of the public key, then the network's P2PKH encoding of it
    key_hash = self.hash160(is_compressed=is_compressed)
    return self._network.address.for_p2pkh(key_hash)
|
def ConsultarAjuste(self, pto_emision=None, nro_orden=None, nro_contrato=None, coe=None, pdf=None):
    """Query a settlement adjustment by order number, contract number or COE.

    Dispatch order: ``nro_contrato`` first, then ``(pto_emision,
    nro_orden)``, and finally ``coe``.  When ``pdf`` is truthy it is
    treated as the file path where the returned PDF document is written.
    Returns True; errors are analyzed via ``self.__analizar_errores``.
    """
    if nro_contrato:
        # Query by contract number (takes precedence over other selectors).
        ret = self.client.ajustePorContratoConsultar(auth={'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit, }, nroContrato=nro_contrato, )
        ret = ret['ajusteContratoReturn']
    elif coe is None or pdf is None:
        # Query by issuing point + order number.
        # NOTE(review): this branch is taken whenever either ``coe`` or
        # ``pdf`` is None, so a COE query without ``pdf`` falls through to
        # the order-number service — confirm this is intentional.
        ret = self.client.ajusteXNroOrdenConsultar(auth={'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit, }, ptoEmision=pto_emision, nroOrden=nro_orden, pdf='S' if pdf else 'N', )
        ret = ret['ajusteXNroOrdenConsReturn']
    else:
        # Query by COE, requesting the PDF document as well.
        ret = self.client.ajusteXCoeConsultar(auth={'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit, }, coe=coe, pdf='S' if pdf else 'N', )
        ret = ret['ajusteConsReturn']
    self.__analizar_errores(ret)
    if 'ajusteUnificado' in ret:
        aut = ret['ajusteUnificado']
        self.AnalizarAjuste(aut)
        # save the PDF if a file path was given and the response included one:
        if pdf and 'pdf' in ret:
            open(pdf, "wb").write(ret['pdf'])
    return True
|
def initial_digit_of_factorial(num):
    """Return the first (most significant) decimal digit of num!.

    Example:
        initial_digit_of_factorial(5)  --> 1
        initial_digit_of_factorial(10) --> 3
        initial_digit_of_factorial(7)  --> 5

    Args:
        num: The input number (non-negative int).

    Returns:
        The first digit of the factorial of the input number, as an int.
    """
    import math
    # math.factorial is exact arbitrary-precision; the previous approach
    # of repeated float division (fact_num /= 10) loses precision for
    # large factorials and can return the wrong leading digit.
    fact_num = math.factorial(num)
    # Strip digits from the right with exact integer division until only
    # the most significant digit remains.
    while fact_num >= 10:
        fact_num //= 10
    return fact_num
|
def get_timeline(self, auth_secret, max_cnt_tweets):
    """Get the general or user timeline.

    An empty ``auth_secret`` selects the general timeline; a valid secret
    selects the logged-in user's timeline; an invalid secret yields an
    error.

    Parameters
    ----------
    auth_secret : str
        The authentication secret of the logged-in user, or ''.
    max_cnt_tweets : int
        Maximum number of tweets to include; -1 means all available.

    Returns
    -------
    (bool, dict)
        ``(True, {TWEETS_KEY: [...]})`` on success,
        ``(False, {ERROR_KEY: ERROR_NOT_LOGGED_IN})`` on failure.
    """
    result = {pytwis_constants.ERROR_KEY: None}
    if auth_secret == '':
        # Empty secret -> public (general) timeline.
        timeline_key = pytwis_constants.GENERAL_TIMELINE_KEY
    else:
        # Non-empty secret must belong to a logged-in user.
        loggedin, userid = self._is_loggedin(auth_secret)
        if not loggedin:
            result[pytwis_constants.ERROR_KEY] = pytwis_constants.ERROR_NOT_LOGGED_IN
            return (False, result)
        timeline_key = pytwis_constants.USER_TIMELINE_KEY_FORMAT.format(userid)
    result[pytwis_constants.TWEETS_KEY] = self._get_tweets(timeline_key, max_cnt_tweets)
    return (True, result)
|
def url(value):
    """Validate a URL.

    :param string value: The URL to validate
    :returns: The URL if valid.
    :raises: ValueError
    """
    # Valid as-is: hand it straight back.
    if url_regex.search(value):
        return value
    message = u"{0} is not a valid URL".format(value)
    # Suggest an http:// prefix when that alone would make it valid.
    if url_regex.search('http://' + value):
        message += u". Did you mean: http://{0}".format(value)
    raise ValueError(message)
|
def predict(self, x):
    """Predict values for a single data point or an RDD of points using
    the model trained.

    .. note:: In Python, predict cannot currently be used within an RDD
        transformation or action.
        Call predict directly on the RDD instead.
    """
    if isinstance(x, RDD):
        # Distributed case: convert each element, then predict in bulk.
        vectors = x.map(_convert_to_vector)
        return self.call("predict", vectors)
    # Single data point.
    return self.call("predict", _convert_to_vector(x))
|
def _get_draw_cache_key(self, grid, key, drawn_rect, is_selected):
    """Return a hashable key for the screen draw cache.

    The key captures everything that influences how the cell at ``key``
    is rendered: zoom-corrected size, selection state, a (truncated)
    preview of the cell result plus its object id, the cell's sorted
    attributes, and the bottom/right border attributes of the cell and
    its seven relevant neighbours.

    :param grid: grid widget that owns ``code_array``
    :param key: ``(row, col, tab)`` cell coordinate
    :param drawn_rect: rectangle the cell is drawn into
    :param is_selected: whether the cell is currently selected
    """
    row, col, tab = key
    cell_attributes = grid.code_array.cell_attributes
    # NOTE(review): on Python 2 this is integer division when both width
    # and zoom are ints — confirm ``self.zoom`` is always a float.
    zoomed_width = drawn_rect.width / self.zoom
    zoomed_height = drawn_rect.height / self.zoom
    # Button cells shall not be executed for preview
    if grid.code_array.cell_attributes[key]["button_cell"]:
        # Calling code_array(key) avoids evaluating the cell's code.
        cell_preview = repr(grid.code_array(key))[:100]
        __id = id(grid.code_array(key))
    else:
        cell_preview = repr(grid.code_array[key])[:100]
        __id = id(grid.code_array[key])
    # Sorted so that equal attribute dicts always yield equal keys.
    sorted_keys = sorted(grid.code_array.cell_attributes[key].iteritems())
    # Neighbour coordinates whose bottom/right borders affect this cell.
    key_above_left = row - 1, col - 1, tab
    key_above = row - 1, col, tab
    key_above_right = row - 1, col + 1, tab
    key_left = row, col - 1, tab
    key_right = row, col + 1, tab
    key_below_left = row + 1, col - 1, tab
    key_below = row + 1, col, tab
    borders = []
    for k in [key, key_above_left, key_above, key_above_right, key_left, key_right, key_below_left, key_below]:
        borders.append(cell_attributes[k]["borderwidth_bottom"])
        borders.append(cell_attributes[k]["borderwidth_right"])
        borders.append(cell_attributes[k]["bordercolor_bottom"])
        borders.append(cell_attributes[k]["bordercolor_right"])
    return (zoomed_width, zoomed_height, is_selected, cell_preview, __id, tuple(sorted_keys), tuple(borders))
|
def read_namespaced_ingress_status(self, name, namespace, **kwargs):  # noqa: E501
    """read_namespaced_ingress_status  # noqa: E501

    read status of the specified Ingress  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_namespaced_ingress_status(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Ingress (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1Ingress
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper: it
    # returns the data directly, or the request thread when async_req
    # is set — so a single call covers both cases.
    kwargs['_return_http_data_only'] = True
    return self.read_namespaced_ingress_status_with_http_info(name, namespace, **kwargs)  # noqa: E501
|
def on_epoch_end(self, pbar, epoch, last_metrics, **kwargs):
    "Put the various losses in the recorder and show a sample image."
    # Nothing to show before the first generated batch, or when image
    # display is disabled.
    if not hasattr(self, 'last_gen') or not self.show_img:
        return
    data = self.learn.data
    img = self.last_gen[0]
    # Denormalize only when the data pipeline normalized the targets.
    norm = getattr(data, 'norm', False)
    if norm and norm.keywords.get('do_y', False):
        img = data.denorm(img)
    img = data.train_ds.y.reconstruct(img)
    # Accumulate one sample per epoch and redisplay the whole history.
    self.imgs.append(img)
    self.titles.append(f'Epoch {epoch}')
    pbar.show_imgs(self.imgs, self.titles)
    # Report the smoothed generator and critic losses as metrics
    # (None when a smoothener has not produced a value yet).
    return add_metrics(last_metrics, [getattr(self.smoothenerG, 'smooth', None), getattr(self.smoothenerC, 'smooth', None)])
|
def create_subscription(self, *, customer_id, credit_card_token, plan_code, quantity=None, installments=None, trial_days=None, immediate_payment=None, extra1=None, extra2=None, delivery_address=None, notify_url=None, recurring_bill_items=None):
    """Creating a new subscription of a client to a plan.

    Args:
        customer_id: Customer that will be associated to the subscription.
        credit_card_token: Customer's credit card selected to make the payment.
        plan_code: Plan that will be associated to the subscription.
        quantity: Total amount of plans acquired with the subscription. Numeric.
        installments: Total amount of installments to defer the payment. Numeric.
        trial_days: Total amount of trial days of the subscription; takes
            preference over the plan's trial days. Numeric.
        immediate_payment:
        extra1:
        extra2:
        delivery_address:
        notify_url:
        recurring_bill_items:

    Returns:
        The API response of the POST to the subscriptions endpoint.
    """
    # Nested structures built separately for readability.
    customer = {
        "id": customer_id,
        "creditCards": [{"token": credit_card_token}],
    }
    payload = {
        "quantity": quantity,
        "installments": installments,
        "trialDays": trial_days,
        "immediatePayment": immediate_payment,
        "extra1": extra1,
        "extra2": extra2,
        "customer": customer,
        "plan": {"planCode": plan_code},
        "deliveryAddress": delivery_address,
        "notifyUrl": notify_url,
        "recurringBillItems": recurring_bill_items,
    }
    endpoint = self.url + 'subscriptions'
    return self.client._post(endpoint, json=payload, headers=self.get_headers())
|
def service(container, name=None):
    """A decorator to register a service on a container.

    For more information see :meth:`Container.add_service`.
    """
    def register(svc):
        # Register, then hand the service back unchanged so the
        # decorated object keeps its original identity.
        container.add_service(svc, name)
        return svc
    return register
|
def desc(t=None, reg=True):
    """Describe Class Dependency.

    :param reg: should we register this class as well
    :param t: custom type name to use instead of the derived one
    :return: a class decorator that registers/describes the class
    :raises NotImplementedError: when applied to a non-class object
    """
    def decorated_fn(cls):
        if not inspect.isclass(cls):
            # Bug fix: ``NotImplemented`` is a non-callable singleton, so
            # the previous ``return NotImplemented(...)`` raised a
            # confusing TypeError.  Raise the intended exception instead.
            raise NotImplementedError('For now we can only describe classes')
        name = t or camel_case_to_underscore(cls.__name__)[0]
        if reg:
            di.injector.register(name, cls)
        else:
            di.injector.describe(name, cls)
        return cls
    return decorated_fn
|
def _try_decode_utf8_content(self, content, content_type):
    """Generic function to decode content.

    :param object content: payload to decode (dict values are decoded
        recursively via ``_try_decode_dict``; anything else goes through
        ``try_utf8_decode``)
    :param content_type: content type used as the decode-cache key
    :return: the decoded content, or the cached value for this
        content type
    """
    # Skip decoding entirely when auto-decode is off or there is
    # nothing to decode.
    if not self._auto_decode or not content:
        return content
    # NOTE(review): the cache is keyed by content_type but stores the
    # decoded *content*, so two different payloads sharing a content
    # type would return the first payload's decoded value — confirm
    # this is intentional before relying on it.
    if content_type in self._decode_cache:
        return self._decode_cache[content_type]
    if isinstance(content, dict):
        content = self._try_decode_dict(content)
    else:
        content = try_utf8_decode(content)
    self._decode_cache[content_type] = content
    return content
|
def make_cfglbls(cfgdict_list, varied_dict):
    """Show only the text in labels that mater from the cfgdict"""
    import textwrap
    wrapper = textwrap.TextWrapper(width=50)
    cfglbl_list = []
    for cfgdict_ in cfgdict_list:
        cfgdict = cfgdict_.copy()
        for key in six.iterkeys(cfgdict_):
            try:
                vals = varied_dict[key]
            except KeyError:
                # Don't print keys not in varydict
                del cfgdict[key]
                continue
            # Dont print label if not varied, or if it is None (irrelevant)
            if len(vals) == 1 or cfgdict[key] is None:
                del cfgdict[key]
        # Render as "key=value" pairs by stripping dict punctuation.
        cfglbl = six.text_type(cfgdict)
        for search, repl in (('\'', ''), ('}', ''), ('{', ''), (': ', '=')):
            cfglbl = cfglbl.replace(search, repl)
        cfglbl_list.append('\n'.join(wrapper.wrap(cfglbl)))
    return cfglbl_list
|
def symlinks(self):
    """Known symlinks of the block device."""
    raw_paths = self._P.Block.Symlinks
    if not raw_paths:
        return []
    # Each entry is a D-Bus byte array; decode to a regular path string.
    return [decode_ay(raw) for raw in raw_paths]
|
def remove_node_by_value(self, value):
    """Delete all nodes in ``self.node_list`` with the value ``value``.

    Args:
        value (Any): The value to find and delete owners of.

    Returns: None

    Example:
        >>> from blur.markov.node import Node
        >>> node_1 = Node('One')
        >>> graph = Graph([node_1])
        >>> graph.remove_node_by_value('One')
        >>> len(graph.node_list)
        0
    """
    surviving = [node for node in self.node_list if node.value != value]
    self.node_list = surviving
    # Prune dangling links that still point at a removed node.
    for node in surviving:
        node.link_list = [link for link in node.link_list
                          if link.target.value != value]
|
def write_stats(datadfs, outputfile, names=[]):
    """Call calculation functions and write stats file.

    This function takes a list of DataFrames,
    and will create a column for each in the tab separated output.

    NOTE(review): ``names=[]`` is a mutable default argument — harmless
    here since it is never mutated, but worth replacing with ``None``.
    """
    # 'stdout' is a magic filename selecting standard output; the opened
    # file handle is otherwise never explicitly closed.
    if outputfile == 'stdout':
        output = sys.stdout
    else:
        output = open(outputfile, 'wt')
    stats = [Stats(df) for df in datadfs]
    # Display label -> Stats attribute name.
    features = {"Number of reads": "number_of_reads", "Total bases": "number_of_bases", "Total bases aligned": "number_of_bases_aligned", "Median read length": "median_read_length", "Mean read length": "mean_read_length", "Read length N50": "n50", "Average percent identity": "average_identity", "Median percent identity": "median_identity", "Active channels": "active_channels", "Mean read quality": "mean_qual", "Median read quality": "median_qual", }
    # Column widths: label column and numeric columns (6 chars of slack).
    max_len = max([len(k) for k in features.keys()])
    try:
        max_num = max(max([len(str(s.number_of_bases)) for s in stats]), max([len(str(n)) for n in names])) + 6
    except ValueError:
        # names is empty -> max() on it raises ValueError.
        max_num = max([len(str(s.number_of_bases)) for s in stats]) + 6
    output.write("{:<{}}{}\n".format('General summary:', max_len, " ".join(['{:>{}}'.format(n, max_num) for n in names])))
    for f in sorted(features.keys()):
        try:
            output.write("{f:{pad}}{v}\n".format(f=f + ':', pad=max_len, v=feature_list(stats, features[f], padding=max_num)))
        except KeyError:
            # Feature not computed for these data — skip the row.
            pass
    # Quality-based sections only make sense when every input has quals.
    if all(["quals" in df for df in datadfs]):
        long_features = {"Top 5 longest reads and their mean basecall quality score": ["top5_lengths", range(1, 6)], "Top 5 highest mean basecall quality scores and their read lengths": ["top5_quals", range(1, 6)], "Number, percentage and megabases of reads above quality cutoffs": ["reads_above_qual", [">Q" + str(q) for q in stats[0].qualgroups]], }
        for lf in sorted(long_features.keys()):
            output.write(lf + "\n")
            for i in range(5):
                output.write("{}:\t{}\n".format(long_features[lf][1][i], feature_list(stats, long_features[lf][0], index=i)))
|
def add_field(self, field):
    """Add the received field to the model.

    Any existing field with the same name is removed first.  The field's
    default is registered under ``field.key``: callables are stored for
    lazy evaluation, plain values are stored directly.
    """
    self.remove_field(field.name)
    self._fields[field.name] = field
    if field.default is None:
        return
    # The builtin ``callable`` exists on every supported Python version,
    # so the ``six.callable`` indirection is unnecessary.
    if callable(field.default):
        self._default_callables[field.key] = field.default
    else:
        self._defaults[field.key] = field.default
|
def validate(fname):
    """This function uses dciodvfy to generate
    a list of warnings and errors discovered within
    the DICOM file.

    :param fname: Location and filename of DICOM file.
    """
    validation = {"errors": [], "warnings": []}
    for line in _process(fname):
        kind, message = _determine(line)
        # Only known kinds ("errors"/"warnings") are collected;
        # anything else is silently ignored.
        bucket = validation.get(kind)
        if bucket is not None:
            bucket.append(message)
    return validation
|
def _check_perms(obj_name, obj_type, new_perms, cur_perms, access_mode, ret):
    '''Helper function used by ``check_perms`` for checking and setting Grant and
    Deny permissions.

    Args:

        obj_name (str):
            The name or full path to the object

        obj_type (Optional[str]):
            The type of object for which to check permissions. Default is 'file'

        new_perms (dict):
            A dictionary containing the user/group and the basic permissions to
            check/grant, ie: ``{'user': {'perms': 'basic_permission'}}``.

        cur_perms (dict):
            A dictionary containing the user/group permissions as they currently
            exists on the target object.

        access_mode (str):
            The access mode to set. Either ``grant`` or ``deny``

        ret (dict):
            A dictionary to append changes to and return. If not passed, will
            create a new dictionary to return.

    Returns:
        dict: A dictionary of return data as expected by the state system
    '''
    access_mode = access_mode.lower()
    # Phase 1: work out, per user, which permissions/applies_to differ
    # from what is currently set (only non-inherited ACEs are compared).
    changes = {}
    for user in new_perms:
        applies_to_text = ''
        # Check that user exists:
        try:
            user_name = get_name(principal=user)
        except CommandExecutionError:
            ret['comment'].append('{0} Perms: User "{1}" missing from Target System' ''.format(access_mode.capitalize(), user))
            continue
        # Get the proper applies_to text
        if 'applies_to' in new_perms[user]:
            applies_to = new_perms[user]['applies_to']
            at_flag = flags().ace_prop['file'][applies_to]
            applies_to_text = flags().ace_prop['file'][at_flag]
        else:
            applies_to = None
        if user_name not in cur_perms['Not Inherited']:
            # No explicit ACE for this user yet: everything requested is new.
            if user not in changes:
                changes[user] = {}
            changes[user][access_mode] = new_perms[user]['perms']
            if applies_to:
                changes[user]['applies_to'] = applies_to
        else:
            # Check Perms for basic perms (a single permission string)
            if isinstance(new_perms[user]['perms'], six.string_types):
                if not has_permission(obj_name=obj_name, principal=user_name, permission=new_perms[user]['perms'], access_mode=access_mode, obj_type=obj_type, exact=False):
                    if user not in changes:
                        changes[user] = {}
                    changes[user][access_mode] = new_perms[user]['perms']
            # Check Perms for advanced perms (a list of permissions)
            else:
                for perm in new_perms[user]['perms']:
                    if not has_permission(obj_name=obj_name, principal=user_name, permission=perm, access_mode=access_mode, obj_type=obj_type, exact=False):
                        if user not in changes:
                            changes[user] = {access_mode: []}
                        changes[user][access_mode].append(perm)
            # Check if applies_to was passed
            if applies_to:
                # Is there a deny/grant permission set
                if access_mode in cur_perms['Not Inherited'][user_name]:
                    # If the applies to settings are different, use the new one
                    if not cur_perms['Not Inherited'][user_name][access_mode]['applies to'] == applies_to_text:
                        if user not in changes:
                            changes[user] = {}
                        changes[user]['applies_to'] = applies_to
    # Phase 2: apply (or, in test mode, only report) the computed changes.
    if changes:
        if 'perms' not in ret['changes']:
            ret['changes']['perms'] = {}
        for user in changes:
            user_name = get_name(principal=user)
            if __opts__['test'] is True:
                # Test mode: report the pending change without applying it.
                # NOTE(review): changes[user] may contain only 'applies_to'
                # here, in which case this lookup raises KeyError — confirm.
                if user not in ret['changes']['perms']:
                    ret['changes']['perms'][user] = {}
                ret['changes']['perms'][user][access_mode] = changes[user][access_mode]
            else:
                # Get applies_to
                applies_to = None
                if 'applies_to' not in changes[user]:
                    # Get current "applies to" settings from the file
                    if user_name in cur_perms['Not Inherited'] and access_mode in cur_perms['Not Inherited'][user_name]:
                        for flag in flags().ace_prop[obj_type]:
                            if flags().ace_prop[obj_type][flag] == cur_perms['Not Inherited'][user_name][access_mode]['applies to']:
                                at_flag = flag
                                for flag1 in flags().ace_prop[obj_type]:
                                    if salt.utils.win_dacl.flags().ace_prop[obj_type][flag1] == at_flag:
                                        applies_to = flag1
                    if not applies_to:
                        # Fall back to the object type's default propagation.
                        if obj_type.lower() in ['registry', 'registry32']:
                            applies_to = 'this_key_subkeys'
                        else:
                            applies_to = 'this_folder_subfolders_files'
                else:
                    applies_to = changes[user]['applies_to']
                perms = []
                if access_mode not in changes[user]:
                    # Get current perms
                    # Check for basic perms
                    for perm in cur_perms['Not Inherited'][user_name][access_mode]['permissions']:
                        for flag in flags().ace_perms[obj_type]['basic']:
                            if flags().ace_perms[obj_type]['basic'][flag] == perm:
                                perm_flag = flag
                                for flag1 in flags().ace_perms[obj_type]['basic']:
                                    if flags().ace_perms[obj_type]['basic'][flag1] == perm_flag:
                                        perms = flag1
                    # Make a list of advanced perms
                    if not perms:
                        for perm in cur_perms['Not Inherited'][user_name][access_mode]['permissions']:
                            for flag in flags().ace_perms[obj_type]['advanced']:
                                if flags().ace_perms[obj_type]['advanced'][flag] == perm:
                                    perm_flag = flag
                                    for flag1 in flags().ace_perms[obj_type]['advanced']:
                                        if flags().ace_perms[obj_type]['advanced'][flag1] == perm_flag:
                                            perms.append(flag1)
                else:
                    perms = changes[user][access_mode]
                try:
                    set_permissions(obj_name=obj_name, principal=user_name, permissions=perms, access_mode=access_mode, applies_to=applies_to, obj_type=obj_type)
                    if user not in ret['changes']['perms']:
                        ret['changes']['perms'][user] = {}
                    ret['changes']['perms'][user][access_mode] = changes[user][access_mode]
                except CommandExecutionError as exc:
                    ret['result'] = False
                    ret['comment'].append('Failed to change {0} permissions for "{1}" to {2}\n' 'Error: {3}'.format(access_mode, user, changes[user], exc.strerror))
    return ret
|
def _get_image_workaround_seek ( self , idx ) :
"""Same as _ _ getitem _ _ but seek through the video beforehand
This is a workaround for an all - zero image returned by ` imageio ` ."""
|
warnings . warn ( "imageio workaround used!" )
cap = self . video_handle
mult = 50
for ii in range ( idx // mult ) :
cap . get_data ( ii * mult )
final = cap . get_data ( idx )
return final
|
async def on_shutdown(app):
    """app SHUTDOWN event handler"""
    # Run every registered close method; coroutine functions are awaited,
    # plain callables are invoked synchronously.
    for close_method in app.get("close_methods", []):
        logger.debug("Calling < %s >", close_method)
        if not asyncio.iscoroutinefunction(close_method):
            close_method()
        else:
            await close_method()
|
def perform(self):
    """This method converts payload into args and calls the ``perform``
    method on the payload class.

    Before calling ``perform``, a ``before_perform`` class method
    is called, if it exists. It takes a dictionary as an argument;
    currently the only things stored on the dictionary are the
    args passed into ``perform`` and a timestamp of when the job
    was enqueued.

    Similarly, an ``after_perform`` class method is called after
    ``perform`` is finished. The metadata dictionary contains the
    same data, plus a timestamp of when the job was performed, a
    ``failed`` boolean value, and if it did fail, a ``retried``
    boolean value. This method is called after retry, and is
    called regardless of whether an exception is ultimately thrown
    by the perform method.
    """
    # Resolve the job class named in the payload and hand it a reference
    # to the queue so it can enqueue follow-up work.
    payload_class_str = self._payload["class"]
    payload_class = self.safe_str_to_class(payload_class_str)
    payload_class.resq = self.resq
    args = self._payload.get("args")
    # Metadata passed to before_perform/after_perform hooks.
    metadata = dict(args=args)
    if self.enqueue_timestamp:
        metadata["enqueue_timestamp"] = self.enqueue_timestamp
    before_perform = getattr(payload_class, "before_perform", None)
    metadata["failed"] = False
    metadata["perform_timestamp"] = time.time()
    # NOTE(review): check_after is never read afterwards — dead variable?
    check_after = True
    try:
        if before_perform:
            payload_class.before_perform(metadata)
        return payload_class.perform(*args)
    except Exception as e:
        metadata["failed"] = True
        metadata["exception"] = e
        if not self.retry(payload_class, args):
            # No retry possible: record and re-raise to mark the job failed.
            metadata["retried"] = False
            raise
        else:
            metadata["retried"] = True
            logging.exception("Retry scheduled after error in %s", self._payload)
    finally:
        # after_perform always runs (success, retry, or re-raise), and the
        # temporary resq attribute is removed from the class either way.
        after_perform = getattr(payload_class, "after_perform", None)
        if after_perform:
            payload_class.after_perform(metadata)
        delattr(payload_class, 'resq')
|
def drop(self, columns):
    """Drop 1 or more columns. Any column which does not exist in the DataFrame is skipped, i.e. not removed,
    without raising an exception.

    Unlike Pandas' drop, this is currently restricted to dropping columns.

    Parameters
    ----------
    columns : str or list of str
        Column name or list of column names to drop.

    Returns
    -------
    DataFrame
        A new DataFrame without these columns.
    """
    if isinstance(columns, str):
        if columns not in self._gather_column_names():
            raise KeyError('Key {} not found'.format(columns))
        # Rebuild the data mapping without the dropped column, keeping
        # the original column order.
        remaining = OrderedDict(
            (name, self._data[name]) for name in self if name != columns
        )
        return DataFrame(remaining, self.index)
    if isinstance(columns, list):
        check_inner_types(columns, str)
        # Drop one column at a time, threading the intermediate frames.
        result = self
        for name in columns:
            result = result.drop(name)
        return result
    raise TypeError('Expected columns as a str or a list of str')
|
def describe(self, fields=None, **kwargs):
    """:param fields: dict where the keys are field names that should
        be returned, and values should be set to True (by default,
        all fields are returned)
    :type fields: dict
    :returns: Description of the analysis
    :rtype: dict

    Returns a hash with key-value pairs containing information
    about the analysis
    """
    # Only include the fields selector when the caller provided one.
    describe_input = {} if fields is None else {'fields': fields}
    self._desc = dxpy.api.analysis_describe(self._dxid, describe_input, **kwargs)
    return self._desc
|
def export(outfile):
    """Export image anchore data to a JSON file.

    ``outfile`` of '-' writes pretty-printed JSON to stdout; otherwise
    the data is written compactly to the given path.  Exits the process
    with a non-zero code on failure.
    """
    if not nav:
        sys.exit(1)
    ecode = 0
    savelist = []
    for imageId in imagelist:
        try:
            record = {}
            record['image'] = {}
            record['image']['imageId'] = imageId
            record['image']['imagedata'] = contexts['anchore_db'].load_image_new(imageId)
            savelist.append(record)
        except Exception as err:
            anchore_print_err("could not find record for image (" + str(imageId) + ")")
            ecode = 1
    if ecode == 0:
        try:
            if outfile == '-':
                # print() is valid on both Python 2 and 3; the old
                # ``print json.dumps(...)`` statement is a SyntaxError
                # under Python 3.
                print(json.dumps(savelist, indent=4))
            else:
                with open(outfile, 'w') as OFH:
                    OFH.write(json.dumps(savelist))
        except Exception as err:
            anchore_print_err("operation failed: " + str(err))
            ecode = 1
    sys.exit(ecode)
|
def usable_id(cls, id):
    """Retrieve id from input which can be num or id."""
    # Anything int() cannot parse is treated as unknown.
    try:
        qry_id = int(id)
    except Exception:
        qry_id = None
    if not qry_id:
        cls.error('unknown identifier %s' % id)
    return qry_id
|
def colorize(string, rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1):
    '''Returns the colored string to print on the terminal.

    This function detects the terminal type and, if it is supported and
    the output is not going to a pipe or a file, returns the colored
    string; otherwise the string is returned unmodified.

    string = the string to print. Only accepts strings, unicode strings must
             be encoded in advance.
    rgb = Rgb color for the text; for example 0xFF0000 is red.
    ansi = Ansi for the text
    bg = Rgb color for the background
    ansi_bg = Ansi color for the background
    fd = The file descriptor that will be used by print, by default is the
         stdout
    '''
    # Terminal detection is cached on the function itself; a different fd
    # invalidates the cache and forces re-detection.
    if colorize.fd != fd:
        colorize.init = False
        colorize.fd = fd
    if not colorize.init:
        colorize.init = True
        colorize.is_term = isatty(fd)
        term = environ.get('TERM')
        if term is None:
            # No TERM variable -> unknown terminal, no coloring.
            colorize.is_term = False
        elif term.startswith('xterm'):
            colorize.cmap = XTermColorMap()
        elif term == 'vt100':
            colorize.cmap = VT100ColorMap()
        else:
            colorize.is_term = False
    if colorize.is_term:
        return colorize.cmap.colorize(string, rgb, ansi, bg, ansi_bg)
    return string
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.