| signature (string, length 29–44.1k) | implementation (string, length 0–85.2k) |
|---|---|
def deserialize_compact(jwt):
    """Deserialize a compact representation of a :class:`~jwt.JWE`.

    :param jwt: the serialized JWT string to deserialize
    :rtype: :class:`~jose.JWT`
    :raises: :class:`~jose.Error` if the JWT is malformed
    """
    parts = jwt.split('.')
    # A compact JWS has 3 dot-separated segments, a JWE has 5.
    token_types = {3: JWS, 5: JWE}
    if len(parts) not in token_types:
        raise Error('Malformed JWT')
    return token_types[len(parts)](*parts)
def update(self):
    '''Refresh the cached image from the proxy, if one is connected.'''
    if not self.hasproxy():
        return
    imageData = self.proxy.getImageData(self.imgFormat)
    desc = imageData.description
    img = Image()
    img.height = desc.height
    img.width = desc.width
    img.format = desc.format
    # Wrap the raw pixel buffer as an H x W x 3 uint8 array.
    img.data = np.frombuffer(imageData.pixelData, dtype=np.uint8)
    img.data.shape = img.height, img.width, 3
    img.timeStamp = (imageData.timeStamp.seconds
                     + imageData.timeStamp.useconds * 1e-9)
    # Publish the new frame under the lock.
    with self.lock:
        self.image = img
def date_time(self, tzinfo=None, end_datetime=None):
    """Get a datetime object for a date between January 1, 1970 and now.

    :param tzinfo: timezone, instance of datetime.tzinfo subclass
    :param end_datetime: upper bound, forwarded to ``unix_time``
    :example DateTime('2005-08-16 20:39:21')
    :return datetime
    """
    # NOTE: On Windows, the lowest value you can get is 86400 on the
    # first day.  Known python issue: https://bugs.python.org/issue30684
    epoch = datetime(1970, 1, 1, tzinfo=tzinfo)
    return epoch + timedelta(seconds=self.unix_time(end_datetime=end_datetime))
def get_log_hierarchy_design_session(self, proxy):
    """Gets the ``OsidSession`` associated with the log hierarchy design service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.logging.LogHierarchyDesignSession) - a
            ``HierarchyDesignSession`` for logs
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_log_hierarchy_design()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_log_hierarchy_design()`` is ``true``.*
    """
    # Honor the capability flag before constructing the session.
    if not self.supports_log_hierarchy_design():
        raise errors.Unimplemented()
    # pylint: disable=no-member
    return sessions.LogHierarchyDesignSession(proxy=proxy, runtime=self._runtime)
def set_title(self, name):
    """Set the song's title (ID3 ``TIT2`` frame, UTF-8 encoding).

    :param name: title, as ``str`` or UTF-8 encoded ``bytes``
    """
    # Decode only when bytes were passed; the original unconditionally
    # called name.decode(), which raises AttributeError for str on Python 3.
    if isinstance(name, bytes):
        name = name.decode('utf-8')
    self._set_attr(TIT2(encoding=3, text=name))
def backend_add(cls, name, backend):
    """Add a backend into a webaccelerator.

    :param name: webaccelerator name or id (resolved via ``usable_id``)
    :param backend: dict with at least 'ip' and 'port' keys
    :returns: the API operation object, after waiting for completion
    """
    oper = cls.call('hosting.rproxy.server.create', cls.usable_id(name), backend)
    cls.echo('Adding backend %s:%s into webaccelerator' % (backend['ip'], backend['port']))
    # Block until the remote operation finishes.
    cls.display_progress(oper)
    cls.echo('Backend added')
    return oper
def raw_p_sha1(secret, seed, sizes=()):
    """Derive one or more keys from secret and seed (TLS P_SHA1 PRF).

    (See specs part 6, 6.7.5 and RFC 2246 - TLS v1.0.)
    Lengths of keys will match sizes argument.
    Source: https://github.com/FreeOpcUa/python-opcua

    key_sizes = (signature_key_size, symmetric_key_size, 16)
    (sigkey, key, init_vec) = p_sha1(nonce2, nonce1, key_sizes)
    """
    needed = sum(sizes)
    # Expand the keystream: A(i) = HMAC(secret, A(i-1)),
    # output += HMAC(secret, A(i) + seed).
    stream = b''
    accum = seed
    while len(stream) < needed:
        accum = hmac_sha1(secret, accum)
        stream += hmac_sha1(secret, accum + seed)
    # Carve the stream into the requested key lengths.
    keys = []
    offset = 0
    for size in sizes:
        keys.append(stream[offset:offset + size])
        offset += size
    return tuple(keys)
def handle_error(self, e):
    """Error handler for the API transforms a raised exception into a Flask
    response, with the appropriate HTTP status code and body.

    :param e: the raised Exception object
    :type e: Exception
    """
    # Notify subscribers (e.g. error reporters) before handling.
    got_request_exception.send(current_app._get_current_object(), exception=e)
    # Re-raise non-HTTP exceptions when the app wants them propagated
    # (typically debug/testing mode); preserve the original traceback
    # when the active exception is the one we were given.
    if not isinstance(e, HTTPException) and current_app.propagate_exceptions:
        exc_type, exc_value, tb = sys.exc_info()
        if exc_value is e:
            raise
        else:
            raise e
    headers = Headers()
    if isinstance(e, HTTPException):
        code = e.code
        default_data = {'message': getattr(e, 'description', http_status_message(code))}
        headers = e.get_response().headers
    else:
        code = 500
        default_data = {'message': http_status_message(code), }
    # Werkzeug exceptions generate a content-length header which is added
    # to the response in addition to the actual content-length header
    # https://github.com/flask-restful/flask-restful/issues/534
    remove_headers = ('Content-Length',)
    for header in remove_headers:
        headers.pop(header, None)
    data = getattr(e, 'data', default_data)
    # Log server-side (5xx) failures with traceback when available.
    if code and code >= 500:
        exc_info = sys.exc_info()
        if exc_info[1] is None:
            exc_info = None
        current_app.log_exception(exc_info)
    # Apply application-registered overrides for this exception class.
    error_cls_name = type(e).__name__
    if error_cls_name in self.errors:
        custom_data = self.errors.get(error_cls_name, {})
        code = custom_data.get('status', 500)
        data.update(custom_data)
    if code == 406 and self.default_mediatype is None:
        # if we are handling NotAcceptable (406), make sure that
        # make_response uses a representation we support as the
        # default mediatype (so that make_response doesn't throw
        # another NotAcceptable error).
        supported_mediatypes = list(self.representations.keys())
        fallback_mediatype = supported_mediatypes[0] if supported_mediatypes else "text/plain"
        resp = self.make_response(data, code, headers, fallback_mediatype=fallback_mediatype)
    else:
        resp = self.make_response(data, code, headers)
    if code == 401:
        resp = self.unauthorized(resp)
    return resp
def pipe_loop(context=None, _INPUT=None, conf=None, embed=None, **kwargs):
    """An operator that loops over the input and performs the embedded
    submodule. Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : pipe2py.modules pipe like object (iterable of items)
    embed : the submodule, i.e., pipe_*(context, _INPUT, conf).
        Most modules, with the exception of User inputs and Operators,
        can be sub-modules.
    conf : {
        'assign_part': {'value': <all or first>},
        'assign_to': {'value': <assigned field name>},
        'emit_part': {'value': <all or first>},
        'mode': {'value': <assign or EMIT>},
        'with': {'value': <looped field name or blank>},
        'embed': {'value': {'conf': <module conf>}}
    }

    Returns
    -------
    _OUTPUT : generator of items
    """
    # Build the per-item callable that runs the embedded submodule.
    cust_func = get_cust_func(context, conf, embed, parse_embed, **kwargs)
    # NOTE(review): ``opts`` is a module-level dict mutated in place here —
    # presumably shared option defaults for this module; confirm it is safe
    # to mutate across concurrent pipe invocations.
    opts.update({'cust_func': cust_func})
    splits = get_splits(_INPUT, conf, **cdicts(opts, kwargs))
    gathered = starmap(parse_result, splits)
    _OUTPUT = utils.multiplex(gathered)
    return _OUTPUT
def create(self, dcid, vpsplanid, osid, params=None):
    '''/v1/server/create
    POST - account
    Create a new virtual machine. You will start being billed for this
    immediately. The response only contains the SUBID for the new machine.
    You should use v1/server/list to poll and wait for the machine to be
    created (as this does not happen instantly).
    Link: https://www.vultr.com/api/#server_create
    '''
    # Merge the required identifiers into any caller-supplied parameters.
    params = update_params(params, {'DCID': dcid, 'VPSPLANID': vpsplanid, 'OSID': osid})
    return self.request('/v1/server/create', params, 'POST')
def findOverlay(self, pchOverlayKey):
    """Finds an existing overlay with the specified key.

    :returns: (result, pOverlayHandle) — the API status code and the
        located overlay handle (a ctypes out-parameter).
    """
    fn = self.function_table.findOverlay
    pOverlayHandle = VROverlayHandle_t()
    result = fn(pchOverlayKey, byref(pOverlayHandle))
    return result, pOverlayHandle
def retrieve(self, request, project, pk=None):
    """GET method implementation for detail view of ``push``.

    :param project: repository name used to scope the lookup
    :param pk: primary key of the push
    """
    try:
        push = Push.objects.get(repository__name=project, id=pk)
        serializer = PushSerializer(push)
        return Response(serializer.data)
    except Push.DoesNotExist:
        # Unknown id for this repository: plain-text 404 body.
        return Response("No push with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
def setup_icons(self):
    """Setup the icons of the ui.

    :returns: None
    :rtype: None
    :raises: None
    """
    # Each toolbutton paired with its icon resource name.
    icon_for_button = (
        (self.menu_tb, "menu_border_24x24.png"),
        (self.duplicate_tb, "duplicate_border_24x24.png"),
        (self.delete_tb, "delete_border_24x24.png"),
        (self.reference_tb, "reference_border_24x24.png"),
        (self.load_tb, "load_border_24x24.png"),
        (self.unload_tb, "unload_border_24x24.png"),
        (self.replace_tb, "replace_border_24x24.png"),
        (self.importref_tb, "import_border_24x24.png"),
        (self.importtf_tb, "import_border_24x24.png"),
        (self.alien_tb, "alien.png"),
        (self.imported_tb, "imported.png"),
    )
    for button, iconname in icon_for_button:
        button.setIcon(get_icon(iconname, asicon=True))
def format_pattrs(pattrs: List['api.PrettyAttribute']) -> str:
    """Generates repr string given a list of pattrs."""
    # Sort in place so attributes sit contiguously by display group
    # (itertools.groupby requires pre-sorted input), ordered by the
    # group's display index, then group, then attribute name.
    def sort_key(attr):
        return (_FORMATTER[attr.display_group].display_index,
                attr.display_group,
                attr.name)

    pattrs.sort(key=sort_key)
    sections = [
        _FORMATTER[group].formatter(group, members)
        for group, members in groupby(pattrs, lambda attr: attr.display_group)
    ]
    return '\n'.join(sections)
def open_by_pat(self, name, mode='r', **kwargs):
    '''Open the file for the pattern given by *name*, substituting the
    object's properties and the additional keyword arguments given.

    :param name: pattern name, resolved via ``fname_by_pat``
    :param mode: file mode passed through to ``open``
    :returns: the opened file object
    '''
    fname = self.fname_by_pat(name, **kwargs)
    # Announce the access on stderr.  The original used the Python 2 only
    # ``print >> sys.stderr`` statement and the removed ``file()`` builtin;
    # both are replaced with their Python 3 equivalents.
    if mode == 'w':
        sys.stderr.write("Write[%s]: %s\n" % (name, fname))
    else:
        sys.stderr.write("Open[%s]: %s\n" % (name, fname))
    return open(fname, mode)
def reload(self):
    """If there is an LDAP connection, query it for another
    instance of this member and set its internal dictionary
    to that result."""
    if not self.ldap:
        # No connection configured; keep the cached data as-is.
        return
    self.memberDict = self.ldap.member(self.uid)
def parseXRDS(text):
    """Parse the given text as an XRDS document.

    @return: ElementTree containing an XRDS document
    @raises XRDSError: When there is a parse error or the document does
        not contain an XRDS.
    """
    try:
        # lxml prefers to parse bytestrings, and occasionally chokes on a
        # combination of text strings and declared XML encodings -- see
        # https://github.com/necaris/python3-openid/issues/19
        # To avoid this, we ensure that the 'text' we're parsing is actually
        # a bytestring
        bytestring = text.encode('utf8') if isinstance(text, str) else text
        element = SafeElementTree.XML(bytestring)
    except (SystemExit, MemoryError, AssertionError, ImportError):
        # Never mask critical/system-level failures as parse errors.
        raise
    except Exception as why:
        exc = XRDSError('Error parsing document as XML')
        exc.reason = why
        raise exc
    else:
        tree = ElementTree.ElementTree(element)
        if not isXRDS(tree):
            raise XRDSError('Not an XRDS document')
        return tree
def Modified(self):
    """Also updates the state of the containing oneof in the parent message."""
    try:
        self._parent_message_weakref._UpdateOneofState(self._field)
        super(_OneofListener, self).Modified()
    except ReferenceError:
        # The parent message was garbage-collected (weakref is dead);
        # there is nothing left to update.
        pass
def find_extensions_in(path: typing.Union[str, pathlib.Path]) -> list:
    """Tries to find things that look like bot extensions in a directory."""
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    if not path.is_dir():
        return []

    def dotted_name(parts):
        # Drop a leading '.' component so the name stays importable.
        if parts[0] == '.':
            parts = parts[1:]
        return '.'.join(parts)

    names = []
    # Plain modules directly in this folder ...
    for candidate in path.glob('*.py'):
        names.append(dotted_name(candidate.with_suffix('').parts))
    # ... and package subfolders (identified by an __init__.py).
    for candidate in path.glob('*/__init__.py'):
        names.append(dotted_name(candidate.parent.parts))
    return names
def deblend_sources(data, segment_img, npixels, filter_kernel=None,
                    labels=None, nlevels=32, contrast=0.001,
                    mode='exponential', connectivity=8, relabel=True):
    """Deblend overlapping sources labeled in a segmentation image.

    Sources are deblended using a combination of multi-thresholding and
    `watershed segmentation
    <https://en.wikipedia.org/wiki/Watershed_(image_processing)>`_.  In
    order to deblend sources, they must be separated enough such that
    there is a saddle between them.

    Parameters
    ----------
    data : array_like
        The 2D array of the image.
    segment_img : `~photutils.segmentation.SegmentationImage` or array_like (int)
        A 2D segmentation image with the same shape as ``data`` where
        sources are labeled by different positive integer values.  A
        value of zero is reserved for the background.
    npixels : int
        The number of connected pixels, each greater than ``threshold``,
        that an object must have to be detected.  Must be positive.
    filter_kernel : array-like (2D) or `~astropy.convolution.Kernel2D`, optional
        Kernel used to filter the image before thresholding.
    labels : int or array-like of int, optional
        The label numbers to deblend.  If `None` (default), all labels
        are deblended.
    nlevels : int, optional
        Number of multi-thresholding levels, spaced exponentially or
        linearly (see ``mode``) between each source's min and max.
    contrast : float, optional
        Fraction of the total (blended) source flux that a local peak
        must have to be considered a separate object; between 0 and 1
        inclusive (0 = maximum deblending, 1 = no deblending).
    mode : {'exponential', 'linear'}, optional
        Spacing between the multi-thresholding levels.
    connectivity : {8, 4}, optional
        Pixel connectivity used to group pixels into a source.
    relabel : bool
        If `True` (default), relabel the result consecutively from 1.

    Returns
    -------
    segment_image : `~photutils.segmentation.SegmentationImage`
        A 2D segmentation image, with the same shape as ``data``, where
        sources are marked by different positive integer values.  A
        value of zero is reserved for the background.

    See Also
    --------
    :func:`photutils.detect_sources`
    """
    if not isinstance(segment_img, SegmentationImage):
        segment_img = SegmentationImage(segment_img)
    if segment_img.shape != data.shape:
        raise ValueError('The data and segmentation image must have '
                         'the same shape')
    if labels is None:
        labels = segment_img.labels
    labels = np.atleast_1d(labels)
    segment_img.check_labels(labels)
    # Smooth the data before thresholding, if a kernel was supplied.
    data = filter_data(data, filter_kernel, mode='constant', fill_value=0.0)
    # New (deblended) labels are appended after the current maximum.
    last_label = segment_img.max_label
    segm_deblended = deepcopy(segment_img)
    for label in labels:
        source_slice = segment_img.slices[segment_img.get_index(label)]
        source_data = data[source_slice]
        source_segm = SegmentationImage(np.copy(segment_img.data[source_slice]))
        source_segm.keep_labels(label)    # include only one label
        source_deblended = _deblend_source(
            source_data, source_segm, npixels, nlevels=nlevels,
            contrast=contrast, mode=mode, connectivity=connectivity)
        # Sanity check: deblending must not change the source footprint.
        if not np.array_equal(source_deblended.data.astype(bool),
                              source_segm.data.astype(bool)):
            raise ValueError('Deblending failed for source "{0}". Please '
                             'ensure you used the same pixel connectivity '
                             'in detect_sources and deblend_sources. If '
                             'this issue persists, then please inform the '
                             'developers.'.format(label))
        if source_deblended.nlabels > 1:
            # replace the original source with the deblended source
            source_mask = (source_deblended.data > 0)
            segm_tmp = segm_deblended.data
            segm_tmp[source_slice][source_mask] = (
                source_deblended.data[source_mask] + last_label)
            segm_deblended.data = segm_tmp    # needed to call data setter
            last_label += source_deblended.nlabels
    if relabel:
        segm_deblended.relabel_consecutive()
    return segm_deblended
def _send_event(self, event):
    """! @brief Process event objects and decide when to send to event sink.

    Pending events accumulate until a timestamp or overflow event is seen,
    at which point all pending events are flushed to the event sink.  When
    a timestamp arrives, it is stamped onto all pending events first.
    """
    # Data trace events may merge into an earlier pending event.
    if self._merge_data_trace_events(event):
        return
    if isinstance(event, events.TraceTimestamp):
        # Stamp every queued event with this timestamp, then flush.
        for pending in self._pending_events:
            pending.timestamp = event.timestamp
        self._flush_events()
        return
    self._pending_events.append(event)
    if isinstance(event, events.TraceOverflow):
        self._flush_events()
def checksum_creation_action(target, source, env):
    """Create a linker command file for patching an application checksum into a firmware image"""
    # Important Notes:
    # There are apparently many ways to calculate a CRC-32 checksum, we use the following options
    # Initial seed value prepended to the input: 0xFFFFFFFF
    # Whether the input is fed into the shift register least-significant bit or most-significant bit first: LSB
    # Whether each data word is inverted: No
    # Whether the final CRC value is inverted: No
    # *These settings must agree between the executive and this function*
    import crcmod
    crc32_func = crcmod.mkCrcFun(0x104C11DB7, initCrc=0xFFFFFFFF, rev=False, xorOut=0)
    with open(str(source[0]), 'rb') as f:
        data = f.read()
    # Ignore the last four bytes of the file since that is where the checksum will go
    data = data[:-4]
    # Make sure the magic number is correct so that we're dealing with an actual firmware image
    # (the magic word sits immediately before the checksum slot).
    magicbin = data[-4:]
    magic, = struct.unpack('<L', magicbin)
    if magic != 0xBAADDAAD:
        raise BuildError("Attempting to patch a file that is not a CDB binary or has the wrong size",
                         reason="invalid magic number found", actual_magic=magic,
                         desired_magic=0xBAADDAAD)
    # Calculate CRC32 in the same way as its done in the target microcontroller
    checksum = crc32_func(data) & 0xFFFFFFFF
    with open(str(target[0]), 'w') as f:
        # hex strings end with L on windows and possibly some other systems
        checkhex = hex(checksum)
        if checkhex[-1] == 'L':
            checkhex = checkhex[:-1]
        f.write("--defsym=__image_checksum=%s\n" % checkhex)
def resize_bytes(fobj, old_size, new_size, offset):
    """Resize an area in a file adding and deleting at the end of it.

    Does nothing if no resizing is needed.

    Args:
        fobj (fileobj)
        old_size (int): The current size of the area starting at offset
        new_size (int): The new size of the area
        offset (int): The start of the area
    Raises:
        IOError
    """
    diff = new_size - old_size
    if diff < 0:
        # Shrink: remove the surplus bytes at the new end of the area.
        delete_bytes(fobj, -diff, offset + new_size)
    elif diff > 0:
        # Grow: insert the extra bytes at the old end of the area.
        insert_bytes(fobj, diff, offset + old_size)
def refreshDetails(self):
    """Refreshes the results for the details view of the browser."""
    # start off by filtering based on the group selection
    tree = self.uiRecordsTREE
    # Suppress change signals while swapping the record set to avoid
    # triggering dependent refreshes mid-update.
    tree.blockSignals(True)
    tree.setRecordSet(self.records())
    tree.blockSignals(False)
def setup_server(clear_old=False, repo="github", python="/usr/bin/python3.6"):
    """Setup server.

    Provisions a deploy user, clones the project, builds a virtualenv,
    installs requirements, runs migrations/collectstatic and wires up
    nginx + supervisor configs.

    :param clear_old: delete and recreate the remote user first
    :param repo: 'github' or 'bitbucket' (raises NotImplementedError otherwise)
    :param python: python interpreter for the virtualenv
    """
    # Build repo-host-specific URLs for deploy keys and cloning.
    if repo == 'github':
        url_keys = 'https://github.com/ArabellaTech/%s/settings/keys' % env.repo_name
        url_clone = 'git@github.com:ArabellaTech/%s.git' % env.repo_name
    elif repo == 'bitbucket':
        url_keys = 'https://bitbucket.org/arabellatech/%s/admin/deploy-keys/' % env.repo_name
        url_clone = 'git@bitbucket.org:arabellatech/%s.git' % env.repo_name
    else:
        raise NotImplementedError('Unknown repo type')
    if clear_old:
        sudo('userdel -r %s' % env.remote_user)
    # Create the deploy user and an SSH keypair to register as deploy key.
    sudo('useradd --shell /bin/bash --create-home %s' % env.remote_user, user='root')
    sudo('ssh-keygen -t rsa -P "" -f /home/%s/.ssh/id_rsa' % env.remote_user, user=env.remote_user)
    sudo('cp -f /home/%s/.ssh/id_rsa.pub ~/key.tmp' % env.remote_user, user='root')
    key = sudo('cat ~/key.tmp', user='root')
    sudo('rm ~/key.tmp', user='root')
    # Pause so the operator can register the deploy key with the repo host.
    print(red('Please put following deploy key in %s' % url_keys))
    print(key)
    prompt(red('Press any key to continue'))
    sudo('export WORKON_HOME=/home/%s/Envs &&\
    source /usr/local/bin/virtualenvwrapper_lazy.sh &&\
    mkvirtualenv %s --no-site-packages -p %s' % (env.remote_user, env.app_dir, python),
         warn_only=True, user=env.remote_user)
    sudo('cd /home/%s/ && git clone %s www' % (env.remote_user, url_clone), user=env.remote_user)
    with cd(env.remote_path):
        sudo('git checkout %s' % env.branch, user=env.remote_user)
        sudo('git pull', user=env.remote_user)
        sudo('cd %s && ln -sf ../config/%s/yd_local_settings.py local_settings.py' % (env.app_dir, env.environment), user=env.remote_user)
        sudo(env.pip + ' install -r requirements.txt --no-cache-dir', user=env.remote_user)
        # Django >= 1.8 dropped syncdb in favor of migrate.
        if django.VERSION >= (1, 8):
            sudo(env.python + ' manage.py migrate', user=env.remote_user)
        else:
            sudo(env.python + ' manage.py syncdb --migrate', user=env.remote_user)
        sudo('cd config && ln -sf %s/logrotate.conf logrotate.conf' % (env.environment))
        # try installing npm
        # install npm modules
        # sudo('/bin/bash ./scripts/fab_build_bower_npm.sh ' + env.remote_user, user=env.remote_user, warn_only=True)
        # fix angular for webkit
        # sudo('/home/' + env.remote_user + '/www/node_modules/.bin/webkit-assign /home/' + env.remote_user +
        #      '/www/nutrimom/static/libs/bower_components/angular/angular.js', user=env.remote_user, warn_only=True)
        # build js
        # sudo('grunt build-js', warn_only=True)
        sudo(env.python + ' manage.py collectstatic -v0 --noinput', user=env.remote_user)
        sudo(env.python + ' manage.py compress -f', user=env.remote_user)
    # Wire the per-environment nginx and supervisor configs and reload both.
    params = (env.remote_user, env.environment, env.remote_user)
    sudo('cd /etc/nginx/sites-enabled && ln -sf /home/%s/www/config/%s/nginx.conf %s.conf' % params, user='root')
    sudo('cd /etc/supervisor/conf.d/ && ln -sf /home/%s/www/config/%s/supervisord.conf %s.conf' % params, user='root')
    sudo('/etc/init.d/nginx reload', user='root')
    sudo('supervisorctl reread && supervisorctl update', user='root')
    update_cron()
def view_all_work_queues():
    """Page for viewing the index of all active work queues."""
    name_col = work_queue.WorkQueue.queue_name
    status_col = work_queue.WorkQueue.status

    def grouped(aggregate):
        # One row per (queue_name, status) carrying the requested aggregate.
        return list(
            db.session.query(name_col, status_col, aggregate)
            .group_by(name_col, status_col))

    queue_dict = {}
    for name, status, count in grouped(func.count(work_queue.WorkQueue.task_id)):
        queue_dict[(name, status)] = dict(name=name, status=status, count=count)
    for name, status, newest_created in grouped(func.max(work_queue.WorkQueue.created)):
        queue_dict[(name, status)]['newest_created'] = newest_created
    for name, status, oldest_eta in grouped(func.min(work_queue.WorkQueue.eta)):
        queue_dict[(name, status)]['oldest_eta'] = oldest_eta

    queue_list = sorted(queue_dict.values(),
                        key=lambda entry: (entry['name'], entry['status']))
    context = dict(queue_list=queue_list)
    return render_template('view_work_queue_index.html', **context)
def _run_xmlsec(self, com_list, extra_args):
    """Common code to invoke xmlsec and parse the output.

    :param com_list: Key-value parameter list for xmlsec
    :param extra_args: Positional parameters to be appended after all
        key-value parameters
    :result: Whatever xmlsec wrote to an --output temporary file
    """
    with NamedTemporaryFile(suffix='.xml', delete=self._xmlsec_delete_tmpfiles) as ntf:
        com_list.extend(['--output', ntf.name])
        com_list += extra_args
        logger.debug('xmlsec command: %s', ' '.join(com_list))
        pof = Popen(com_list, stderr=PIPE, stdout=PIPE)
        p_out, p_err = pof.communicate()
        p_out = p_out.decode()
        p_err = p_err.decode()
        if pof.returncode != 0:
            # Surface the full invocation result before raising.
            errmsg = "returncode={code}\nerror={err}\noutput={out}".format(
                code=pof.returncode, err=p_err, out=p_out)
            logger.error(errmsg)
            raise XmlsecError(errmsg)
        # Rewind before reading what xmlsec wrote to the --output file.
        ntf.seek(0)
        return p_out, p_err, ntf.read()
def dump_normals(dataset_dir, data_dir, dataset, root=None, compress=True):
    """Dump vtkjs normal vectors of ``dataset`` into the ``root`` dict.

    :param root: vtkjs description dict to update; created when ``None``
    :param compress: forwarded to ``dump_data_array``
    :returns: the updated ``root`` dict (previously the function returned
        ``None``, silently discarding the result when ``root`` was ``None``)
    """
    if root is None:
        root = {}
    normals = dataset.GetPointData().GetNormals()
    if normals:
        dumped_array = dump_data_array(dataset_dir, data_dir, normals, {}, compress)
        # Use setdefault so a bare root works too: the original indexed
        # root['pointData'] directly and raised KeyError for a fresh dict.
        point_data = root.setdefault('pointData', {})
        arrays = point_data.setdefault('arrays', [])
        point_data['activeNormals'] = len(arrays)
        arrays.append({'data': dumped_array})
    return root
async def on_message(message):
    """The on_message event handler for this module.

    Args:
        message (discord.Message): Input message
    """
    # Simplify message info
    server = message.server
    author = message.author
    channel = message.channel
    content = message.content
    data = datatools.get_data()
    # Only reply to server messages and don't reply to myself
    if server is not None and author != channel.server.me:
        prefix = data["discord"]["servers"][server.id]["prefix"]
        # Check for mentions; reply to mentions with the current prefix
        if channel.server.me in message.mentions:
            await client.send_typing(channel)
            response = "The current server prefix is `{0}`. Type `{0}help` for help.".format(prefix)
            await client.send_message(channel, response)
        # Commands section
        if content.startswith(prefix):
            # Parse message into command name and arguments
            package = content.split(" ")
            command = package[0][len(prefix):]
            args = package[1:]
            arg = ' '.join(args)
            # Commands handled by this module; anything else is ignored
            if command not in ["prefix", "activate", "deactivate", "warnmax", "warn", "ban"]:
                return
            # Admin gate: server owner or any role with Administrator
            is_admin = author == server.owner
            for role in message.author.roles:
                if role.permissions.administrator:
                    is_admin = True
            if not is_admin:
                await client.send_typing(channel)
                reason = "You must have a role that has the permission 'Administrator'"
                embed = ui_embed.error(channel, "Insufficient Permissions", reason)
                await embed.send()
                return
            if command == "prefix" and args:
                new_prefix = arg.replace(" ", "").strip()
                data["discord"]["servers"][server.id]["prefix"] = new_prefix
                # Write the data
                datatools.write_data(data)
                await client.send_typing(channel)
                embed = ui_embed.modify_prefix(channel, new_prefix)
                await embed.send()
            if command == "warnmax" and args:
                try:
                    warn_max = int(arg)
                    if warn_max > 0:
                        data["discord"]["servers"][server.id][_data.modulename]["warnings_max"] = warn_max
                        datatools.write_data(data)
                        await client.send_typing(channel)
                        embed = ui_embed.warning_max_changed(channel, warn_max)
                        await embed.send()
                    else:
                        reason = "Maximum warnings must be greater than 0"
                        embed = ui_embed.error(channel, "Error", reason)
                        await embed.send()
                except (ValueError, TypeError):
                    # Non-numeric argument
                    reason = "Warning maximum must be a number"
                    embed = ui_embed.error(channel, "Error", reason)
                    await embed.send()
                except Exception as e:
                    logger.exception(e)
            if command == "warn" and args:
                for user in message.mentions:
                    await api_manager.warn_user(channel, user)
            if command == "ban" and args:
                for user in message.mentions:
                    await api_manager.ban_user(channel, user)
            if command == "activate" and args:
                await api_manager.activate_module(channel, arg, True)
            elif command == "deactivate" and args:
                await api_manager.activate_module(channel, arg, False)
def find_studies(self, query_dict=None, exact=False, verbose=False, **kwargs):
    """Query on study properties. See documentation for _OTIWrapper class."""
    # v1 of the API exposed the same query under a different endpoint name.
    if self.use_v1:
        uri = '{p}/singlePropertySearchForStudies'.format(p=self.query_prefix)
    else:
        uri = '{p}/find_studies'.format(p=self.query_prefix)
    return self._do_query(uri, query_dict=query_dict, exact=exact, verbose=verbose,
                          valid_keys=self.study_search_term_set, kwargs=kwargs)
def getlist(self, key, delimiter=',', **kwargs):
    """Gets the setting value as a :class:`list`; it splits the string using ``delimiter``.

    :param str delimiter: split the value using this delimiter
    :rtype: list
    """
    value = self.get(key, **kwargs)
    if value is None:
        return value
    if not isinstance(value, str):
        # Already a sequence of some kind; just coerce to list.
        return list(value)
    value = value.strip()
    # Bracketed values are serialized lists, e.g. "[1, 2]".
    if value.startswith('[') and value.endswith(']'):
        return self.getserialized(key)
    return [part.strip(' ') for part in value.split(delimiter)]
def get_nice_name(name):
    """Converts a string to a nice string: **currentLogText** -> **Current Log Text**.

    Usage::

        >>> get_nice_name("getMeANiceName")
        u'Get Me A Nice Name'

    :param name: Current string to be nicified.
    :type name: unicode
    :return: Nicified string.
    :rtype: unicode
    """
    # Two passes: split acronym/word boundaries, then lower-to-upper
    # transitions, before title-casing each resulting word.
    spaced = re.sub(r"(.)([A-Z][a-z]+)", r"\1 \2", name)
    spaced = re.sub(r"([a-z0-9])([A-Z])", r"\1 \2", spaced)
    return " ".join(word.title() for word in spaced.split())
def append(self, other):
    """Add the rows of an SFrame to the end of this SFrame.

    Both SFrames must have the same set of columns with the same column
    names and column types.

    Parameters
    ----------
    other : SFrame
        Another SFrame whose rows are appended to the current SFrame.

    Returns
    -------
    out : SFrame
        The result SFrame from the append operation.

    Examples
    --------
    >>> sf = turicreate.SFrame({'id': [4, 6, 8], 'val': ['D', 'F', 'H']})
    >>> sf2 = turicreate.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> sf = sf.append(sf2)
    """
    if type(other) is not SFrame:
        raise RuntimeError("SFrame append can only work with SFrame")
    # An empty side contributes nothing: two empties produce a fresh empty
    # frame, one empty side returns a copy of the non-empty side.
    left_empty = len(self.column_names()) == 0
    right_empty = len(other.column_names()) == 0
    if (left_empty and right_empty):
        return SFrame()
    if (left_empty or right_empty):
        non_empty_sframe = self if right_empty else other
        return non_empty_sframe.__copy__()
    my_column_names = self.column_names()
    my_column_types = self.column_types()
    other_column_names = other.column_names()
    if (len(my_column_names) != len(other_column_names)):
        raise RuntimeError("Two SFrames have to have the same number of columns")
    # Check whether the column name order already matches.
    column_name_order_match = True
    for i in range(len(my_column_names)):
        if other_column_names[i] != my_column_names[i]:
            column_name_order_match = False
            break
    processed_other_frame = other
    if not column_name_order_match:
        # Column order may differ between the two frames: rebuild "other"
        # column-by-column in this frame's order, validating names and
        # types along the way.
        processed_other_frame = SFrame()
        for i in range(len(my_column_names)):
            col_name = my_column_names[i]
            if (col_name not in other_column_names):
                raise RuntimeError("Column " + my_column_names[i] + " does not exist in second SFrame")
            other_column = other.select_column(col_name)
            processed_other_frame.add_column(other_column, col_name, inplace=True)
            # Column types must agree for the append to be well-defined.
            if my_column_types[i] != other_column.dtype:
                raise RuntimeError("Column " + my_column_names[i] + " type is not the same in two SFrames, one is " + str(my_column_types[i]) + ", the other is " + str(other_column.dtype))
    # The actual row concatenation happens in the native proxy layer.
    with cython_context():
        return SFrame(_proxy=self.__proxy__.append(processed_other_frame.__proxy__))
def run_once(self):
    """Run a single iteration of the IRC client.

    Reads one line, parses it into a packet, and dispatches it to the
    registered event handlers. ``run_loop`` calls this in a loop; calling
    it directly is possible but rarely needed.
    """
    packet = _parse_irc_packet(next(self.lines))
    for handler in list(self.on_packet_received):
        handler(self, packet)
    command = packet.command
    if command == "PRIVMSG":
        sender = packet.prefix.split("!")[0]
        target = packet.arguments[0]
        text = packet.arguments[1]
        if target.startswith("#"):
            for handler in list(self.on_public_message):
                handler(self, target, sender, text)
        else:
            for handler in list(self.on_private_message):
                handler(self, sender, text)
    elif command == "PING":
        self.send_line("PONG :{}".format(packet.arguments[0]))
        for handler in list(self.on_ping):
            handler(self)
    elif command == "433" or command == "437":
        # Nick in use / temporarily unavailable: retry with an underscore.
        self.set_nick("{}_".format(self.nick))
    elif command == "001":
        for handler in list(self.on_welcome):
            handler(self)
    elif command == "JOIN":
        for handler in list(self.on_join):
            handler(self, packet.arguments[0], packet.prefix.split("!")[0])
    elif command == "PART":
        for handler in list(self.on_leave):
            handler(self, packet.arguments[0], packet.prefix.split("!")[0])
def modflow_read_hydmod_file(hydmod_file, hydmod_outfile=None):
    """Read a binary MODFLOW HYDMOD file and return a dataframe of results.

    Parameters
    ----------
    hydmod_file : str
        modflow hydmod binary file
    hydmod_outfile : str
        output file to write. If None, use <hydmod_file>.dat.
        Default is None

    Returns
    -------
    (pandas.DataFrame, str)
        Two-column frame of observation names ('obsnme') and values
        ('obsval'), and the path of the file that was written.
        Returns None (and prints a message) when flopy is not installed.

    Note
    ----
    requires flopy
    """
    try:
        import flopy.utils as fu
    except Exception as e:
        print('flopy is not installed - cannot read {0}\n{1}'.format(hydmod_file, e))
        return
    obs = fu.HydmodObs(hydmod_file)
    hyd_df = obs.get_dataframe()
    # Drop the 2-character prefix flopy puts on each column name; leave the
    # elapsed-time column ('totim') untouched.
    hyd_df.columns = [i[2:] if i.lower() != 'totim' else i for i in hyd_df.columns]
    # Replace elapsed time with a YYYYMMDD stamp derived from the index
    # (assumes the index maps to datetime-like values - TODO confirm).
    hyd_df['totim'] = hyd_df.index.map(lambda x: x.strftime("%Y%m%d"))
    hyd_df.rename(columns={'totim': 'datestamp'}, inplace=True)
    # Reshape wide table into one (datestamp, variable, value) row per entry.
    hyd_df = pd.melt(hyd_df, id_vars='datestamp')
    hyd_df.rename(columns={'value': 'obsval'}, inplace=True)
    # Observation name combines the variable name and the date stamp.
    hyd_df['obsnme'] = [i.lower() + '_' + j.lower() for i, j in zip(hyd_df.variable, hyd_df.datestamp)]
    # Duplicate observation names would collide downstream: dump both the
    # melted and the original frames for inspection, then bail out.
    vc = hyd_df.obsnme.value_counts().sort_values()
    vc = list(vc.loc[vc > 1].index.values)
    if len(vc) > 0:
        hyd_df.to_csv("hyd_df.duplciates.csv")
        obs.get_dataframe().to_csv("hyd_org.duplicates.csv")
        raise Exception("duplicates in obsnme:{0}".format(vc))
    if not hydmod_outfile:
        hydmod_outfile = hydmod_file + '.dat'
    # Space-separated two-column output suitable for PEST-style tooling.
    hyd_df.to_csv(hydmod_outfile, columns=['obsnme', 'obsval'], sep=' ', index=False)
    return hyd_df[['obsnme', 'obsval']], hydmod_outfile
def ormSessionCreator(self) -> DbSessionCreator:
    """Return the SQLAlchemy scoped-session factory, creating it lazily.

    :return: A SQLAlchemy session scoped for the caller's thread.
    """
    assert self._dbConnectString
    if not self._ScopedSession:
        # First call: build the engine and the thread-scoped session factory.
        engine = create_engine(self._dbConnectString, **self._dbEngineArgs)
        self._dbEngine = engine
        self._ScopedSession = scoped_session(sessionmaker(bind=engine))
    return self._ScopedSession
def fit(self, X, y, sample_weight=None):
    """Build a survival support vector machine model from training data.

    Parameters
    ----------
    X : array-like, shape = (n_samples, n_features)
        Data matrix.
    y : structured array, shape = (n_samples,)
        A structured array containing the binary event indicator as first
        field, and time of event or time of censoring as second field.
    sample_weight : array-like, shape = (n_samples,), optional
        Weights assigned to individual samples; unit weight for every
        sample when omitted.

    Returns
    -------
    self
    """
    rng = check_random_state(self.random_state)
    pairs_x, pairs_y = self._get_survival_pairs(X, y, rng)
    # The parent SVM expects the regularization strength as ``C``.
    self.C = self.alpha
    return super().fit(pairs_x, pairs_y, sample_weight=sample_weight)
def save(self, *args, **kwargs):
    """Geocode the instance, then delegate to the normal model ``save()``."""
    # Refresh the coordinates before persisting.
    self.geocode()
    return super(GeoMixin, self).save(*args, **kwargs)
def capture_on_device_name(device_name, callback):
    """Run a packet capture on the named device, invoking ``callback`` per packet.

    :param device_name: the name (guid) of a device as provided by
        WinPcapDevices.list_devices()
    :param callback: a function to call with each intercepted packet
    """
    with WinPcap(device_name) as pcap:
        pcap.run(callback=callback)
def verify(self, pkt, key):
    """Check that the integrity check value (icv) of a packet is valid.

    @param pkt: a packet that contains a valid encrypted ESP or AH layer
    @param key: the authentication key, a byte string
    @raise IPSecIntegrityError: if the integrity check fails
    """
    # NULL-authentication: nothing to verify.
    if not self.mac or self.icv_size == 0:
        return
    mac = self.new_mac(key)
    pkt_icv = 'not found'
    computed_icv = 'not computed'
    if isinstance(pkt, ESP):
        # ESP: the ICV is the trailing icv_size bytes of the payload;
        # authenticate a copy with the ICV stripped off.
        pkt_icv = pkt.data[len(pkt.data) - self.icv_size:]
        clone = pkt.copy()
        clone.data = clone.data[:len(clone.data) - self.icv_size]
    elif pkt.haslayer(AH):
        if len(pkt[AH].icv) != self.icv_size:
            # Fill padding since we know the actual icv_size.
            pkt[AH].padding = pkt[AH].icv[self.icv_size:]
            pkt[AH].icv = pkt[AH].icv[:self.icv_size]
        pkt_icv = pkt[AH].icv
        # AH authenticates the whole packet with mutable fields zeroed.
        clone = zero_mutable_fields(pkt.copy(), sending=False)
    mac.update(raw(clone))
    # Truncate the computed MAC to the configured ICV length.
    computed_icv = mac.finalize()[:self.icv_size]
    # XXX: Cannot use mac.verify because the ICV can be truncated
    if pkt_icv != computed_icv:
        raise IPSecIntegrityError('pkt_icv=%r, computed_icv=%r' % (pkt_icv, computed_icv))
def on_batch_end(self, last_input, last_output, **kwargs):
    "Steps through the generators then each of the critics."
    # Clear generator gradients; the critics are trained on detached fakes,
    # so no gradient flows back into the generators here.
    self.G_A.zero_grad();
    self.G_B.zero_grad()
    fake_A, fake_B = last_output[0].detach(), last_output[1].detach()
    real_A, real_B = last_input
    # Train critic A: mean of the real-vs-fake criterion on domain A.
    self._set_trainable(D_A=True)
    self.D_A.zero_grad()
    loss_D_A = 0.5 * (self.crit(self.D_A(real_A), True) + self.crit(self.D_A(fake_A), False))
    loss_D_A.backward()
    self.opt_D_A.step()
    # Train critic B likewise on domain B.
    self._set_trainable(D_B=True)
    self.D_B.zero_grad()
    loss_D_B = 0.5 * (self.crit(self.D_B(real_B), True) + self.crit(self.D_B(fake_B), False))
    loss_D_B.backward()
    self.opt_D_B.step()
    # Restore the default trainability flags.
    self._set_trainable()
    # Record smoothed values of the generator and critic losses.
    metrics = self.learn.loss_func.metrics + [loss_D_A, loss_D_B]
    for n, m in zip(self.names, metrics):
        self.smootheners[n].add_value(m)
def Size(self):
    """Get the total size in bytes of the object.

    Returns:
        int: size.
    """
    base = super(ValidatorState, self).Size()
    # NOTE(review): `s.uint8` is presumably the serialized size of a
    # one-byte field from the `s` helper module - confirm.
    return base + self.PublicKey.Size() + s.uint8 + self.Votes.Size()
def _make_rank(dist_obj, n, mu, sigma, crit=0.5, upper=10000, xtol=1):
    """Make rank distribution using both ppf and brute force.

    Quantiles at or below ``crit`` use the distribution's ppf directly;
    larger quantiles are inverted numerically with brentq.
    Setting crit = 1 is equivalent to just using the ppf.

    Parameters
    ----------
    dist_obj : distribution-like
        Object with ``cdf`` and ``ppf`` methods taking (q, mu, sigma).
    n : int
        Number of ranks to generate.
    mu, sigma :
        Distribution parameters passed through to ``cdf``/``ppf``.
    crit : float
        Quantile cutoff between the ppf and brute-force branches.
    upper : float
        Upper bracket for the brentq root search.
    xtol : float
        Absolute tolerance for brentq.
    """
    # Midpoint quantiles (i - 0.5)/n for i = 1..n.
    qs = (np.arange(1, n + 1) - 0.5) / n
    rank = np.empty(len(qs))
    # Root in `val` of this function is the value whose cdf equals `prob`.
    brute_ppf = lambda val, prob: prob - dist_obj.cdf(val, mu, sigma)
    qs_less = qs <= crit
    ind = np.sum(qs_less)
    # Use ppf if qs are below crit
    rank[qs_less] = dist_obj.ppf(qs[qs_less], mu, sigma)
    # Use brute force if they are above
    for i, tq in enumerate(qs[~qs_less]):
        j = ind + i
        try:
            # TODO: Use an adaptable lower bound to increase speed
            rank[j] = np.abs(np.ceil(optim.brentq(brute_ppf, -1, upper, args=(tq,), xtol=xtol)))
        except ValueError:
            # brentq could not bracket a root (quantile lies above `upper`):
            # set all remaining values to the previous value.
            rank[j:] = np.repeat(rank[j - 1], len(rank[j:]))
            break
    return rank
def adapt_entropy(line, cfg, filter_obj):
    """Pick the filtered variant of ``line`` whose values have the least dispersion.

    Runs every filter over the line and returns the first candidate with the
    fewest distinct values.
    """
    candidates = filter_obj.filter_all(line)
    distinct_counts = [len(set(candidate)) for candidate in candidates]
    best = distinct_counts.index(min(distinct_counts))
    return candidates[best]
def calmar(sharpe, T=1.0):
    '''Calculate the Calmar ratio for a Wiener process.

    @param sharpe: Annualized Sharpe ratio
    @param T: Time interval in years
    '''
    half_variance = 0.5 * T * sharpe * sharpe
    return half_variance / qp(half_variance)
def hcenter_blit(target, source, dest=(0, 0), area=None, special_flags=0):
    '''The same as center_blit(), but only centers horizontally.'''
    def loc(d, s):
        # Horizontal offset between the two surfaces' centers; y stays 0.
        return _vec(d.get_width() / 2, 0) - _vec(s.get_width() / 2, 0)
    _blitter(loc, target, source, dest, area, special_flags)
def sense_ttb(self, target):
    """Sense for a Type B Target is not supported."""
    message = "{device} does not support sense for Type B Target".format(device=self)
    raise nfc.clf.UnsupportedTargetError(message)
def create_primary_zone_by_axfr(self, account_name, zone_name, master, tsig_key=None, key_value=None):
    """Create a new primary zone by zone transfer (AXFR) from a master.

    Arguments:
        account_name -- The name of the account that will contain this zone.
        zone_name -- The name of the zone. It must be unique.
        master -- Primary name server IP address.

    Keyword Arguments:
        tsig_key -- For TSIG-enabled zones: The transaction signature key.
            NOTE: Requires key_value.
        key_value -- TSIG key secret.
    """
    zone_properties = {"name": zone_name, "accountName": account_name, "type": "PRIMARY"}
    # TSIG fields are only included when both the key name and secret are set.
    if tsig_key is not None and key_value is not None:
        name_server_info = {"ip": master, "tsigKey": tsig_key, "tsigKeyValue": key_value}
    else:
        name_server_info = {"ip": master}
    primary_zone_info = {"forceImport": True, "createType": "TRANSFER", "nameServer": name_server_info}
    payload = {"properties": zone_properties, "primaryCreateInfo": primary_zone_info}
    return self.rest_api_connection.post("/v1/zones", json.dumps(payload))
def mixed_list_file(cls, filename, values, bits):
    """Write a list of mixed values to a file, one per line.

    If a file of the same name exists, its contents are replaced.
    See L{HexInput.mixed_list_file} for a description of the file format.

    @type filename: str
    @param filename: Name of the file to write.
    @type values: list(int)
    @param values: List of mixed values to write to the file.
    @type bits: int
    @param bits:
        (Optional) Number of bits of the target architecture.
        The default is platform dependent. See: L{HexOutput.integer_size}
    """
    # Use a context manager so the file is closed even if formatting fails
    # (the original left the handle open on exception), and write with the
    # Python 3 print function instead of the Python 2 ``print >> fd`` form.
    with open(filename, 'w') as fd:
        for original in values:
            try:
                parsed = cls.integer(original, bits)
            except TypeError:
                # Non-integer values are written out as their repr.
                parsed = repr(original)
            print(parsed, file=fd)
def resolve_authconfig(self, registry=None):
    """Return the auth data for ``registry`` from this configuration.

    As with the Docker client, legacy entries in the config with full URLs
    are stripped down to hostnames before checking for a match. Returns
    None if no match was found.
    """
    # Credential stores/helpers take precedence over the inline auth dict.
    if self.creds_store or self.cred_helpers:
        store_name = self.get_credential_store(registry)
        if store_name is not None:
            log.debug('Using credentials store "{0}"'.format(store_name))
            entry = self._resolve_authconfig_credstore(registry, store_name)
            if entry is not None:
                return entry
            log.debug('No entry in credstore - fetching from auth dict')
    # Default to the public index server when no registry was given.
    registry = resolve_index_name(registry) if registry else INDEX_NAME
    log.debug("Looking for auth entry for {0}".format(repr(registry)))
    # Exact key match first.
    if registry in self.auths:
        log.debug("Found {0}".format(repr(registry)))
        return self.auths[registry]
    # Fall back to matching legacy full-URL keys by their hostname.
    for key, conf in self.auths.items():
        if resolve_index_name(key) == registry:
            log.debug("Found {0}".format(repr(key)))
            return conf
    log.debug("No entry found")
    return None
def parse(self, module):
    """Extract all subroutine and function definitions from ``module``."""
    # Because of embedded types we have to scan the entire module body for
    # executable definitions.
    self.parse_block(module.refstring, module, module, 0)
    # ``contains`` is the text from the start of the *first* non-embedded
    # executable to the end of the module source.
    first_start = min(
        (x.start for x in module.executables.values()),
        default=len(module.refstring),
    )
    module.contains = module.refstring[first_start::]
def _RunIpRoute(self, args=None, options=None):
    """Run an ``ip route`` command and return its standard output.

    Args:
        args: list, the string ip route command args to execute.
        options: dict, the string parameters to append to the ip route command.
    Returns:
        string, the standard output from the ip route command execution,
        or '' when the command fails.
    """
    command = ['ip', 'route']
    command.extend(args or [])
    # Options are appended as alternating key/value tokens.
    for pair in (options or {}).items():
        command.extend(pair)
    try:
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
    except OSError as e:
        self.logger.warning('Exception running %s. %s.', command, str(e))
    else:
        if process.returncode:
            message = 'Non-zero exit status running %s. %s.'
            self.logger.warning(message, command, stderr.strip())
        else:
            return stdout.decode('utf-8', 'replace')
    return ''
def _definition(self):
    """|HeaderPart| object containing content of this header."""
    reference = self._sectPr.get_headerReference(self._hdrftr_index)
    return self._document_part.header_part(reference.rId)
def _calculate_hash(files, root):
    """Return a hash of all of the given files at the given root.

    Args:
        files (list[str]): file names to include in the hash calculation,
            relative to ``root``.
        root (str): base directory to analyze files in.
    Returns:
        str: A hash of the hashes of the given files.
    """
    file_hash = hashlib.md5()
    for fname in sorted(files):
        # Separate the name from the content with a NUL so that renames
        # cannot collide with content changes.
        file_hash.update((fname + "\0").encode())
        # Fix: the original used iter(..., "") with a *str* sentinel on a
        # binary file, which never matches; use the bytes sentinel b"" and
        # a context manager so the handle is closed on error too.
        with open(os.path.join(root, fname), "rb") as fd:
            for chunk in iter(lambda: fd.read(4096), b""):
                file_hash.update(chunk)
        file_hash.update(b"\0")
    return file_hash.hexdigest()
def fromutc(self, dt):
    """datetime in UTC -> datetime in local time."""
    if not isinstance(dt, datetime):
        raise TypeError("fromutc() requires a datetime argument")
    if dt.tzinfo is not self:
        raise ValueError("dt.tzinfo is not self")
    # Work with a naive UTC datetime for transition-table lookups.
    dt = dt.replace(tzinfo=None)
    # Past the last explicit transition, delegate to the POSIX TZ rules.
    if dt > self.posix_after:
        dt = self.posix_rules.fromutc(dt.replace(tzinfo=self.posix_rules))
        return dt.replace(tzinfo=self)
    if dt < self.ut[1]:
        # Before the first real transition: the first (historic) offset
        # applies and there is no ambiguity.
        tti = self.ti[0]
        fold = 0
    else:
        # Locate the transition interval containing dt.
        idx = bisect.bisect_right(self.ut, dt)
        assert self.ut[idx - 1] <= dt
        assert idx == len(self.ut) or dt < self.ut[idx]
        tti_prev, tti = self.ti[idx - 2:idx]
        # Detect fold: if the clock was set back at the last transition,
        # local times within the repeated span are ambiguous.
        shift = tti_prev[0] - tti[0]
        fold = (shift > dt - self.ut[idx - 1])
    # Apply the UTC offset of the selected interval.
    dt += tti[0]
    dt = dt.replace(tzinfo=self)
    if fold:
        return enfold(dt)
    else:
        return dt
def check_login_password(request):
    """Check that a login and password combination is valid.

    request_serializer: serializers.CheckLoginPassword
    response_serializer: serializers.User

    responseMessages:
        - code: 200 -- Login and password is valid
        - code: 400 -- Login and password is not valid
        - code: 401 -- Unauthorized
    """
    serializer = serializers.CheckLoginPassword(request=request, data=request.data)
    # Invalid credentials raise a validation error (HTTP 400).
    serializer.is_valid(raise_exception=True)
    username = serializer.validated_data['username']
    logger.debug('Username and password correct', extra={'username': username})
    account = serializers.AuthUser.objects.get(username=username)
    return Response(serializers.User(instance=account).data)
def check_resize(resize):
    """Validate a resize specification; raise on illegal values.

    Accepts ``None`` (no resize), ``"<width>x<height>"`` with integer
    parts, or ``"<percent>%"`` with a percentage between 1 and 1000.

    :raises PercentageOutOfRange: percentage outside [1, 1000]
    :raises MallformedResize: any other malformed value
    """
    if resize is None:
        return
    resize = resize.lower().strip()
    if 'x' in resize:
        # Fix: the original computed an unused split result before this one.
        parts = [p.strip() for p in resize.split('x')]
        if len(parts) == 2 and parts[0].isdigit() and parts[1].isdigit():
            return
    elif '%' in resize:
        number = resize.split('%')[0]
        if number.isnumeric():
            if 1 <= int(number) <= 1000:
                return
            raise PercentageOutOfRange("percentage must be between 1 and 1000")
    raise MallformedResize('Resize value "%s" is mallformed. ' 'Desired format is: {width}x{height} or {percentage}%%' % resize)
def repo_name(self):
    """Return a one-column DataFrame of the repo names in this project directory.

    :return: DataFrame
    """
    rows = [[repo.repo_name] for repo in self.repos]
    return pd.DataFrame(rows, columns=['repository'])
def run_analysis(self, argv):
    """Run this analysis.

    Parses ``argv`` with the instance's parser, builds a fermipy
    ``GTAnalysis`` from the given config, performs a baseline fit,
    localization and source finding, and writes the resulting ROI.
    Raises RuntimeError if the Fermi Science Tools are not available.
    """
    args = self._parser.parse_args(argv)
    if not HAVE_ST:
        raise RuntimeError("Trying to run fermipy analysis, but don't have ST")
    gta = GTAnalysis(args.config, logging={'verbosity': 3}, fileio={'workdir_regex': '\.xml$|\.npy$'})
    gta.setup(overwrite=False)
    # Baseline fit restricted to sufficiently bright sources.
    baseline_roi_fit(gta, make_plots=args.make_plots, minmax_npred=[1e3, np.inf])
    localize_sources(gta, nstep=5, dtheta_max=0.5, update=True, prefix='base', make_plots=args.make_plots)
    # Search for new sources beyond 1 deg from the ROI center.
    gta.find_sources(sqrt_ts_threshold=5.0, search_skydir=gta.roi.skydir, search_minmax_radius=[1.0, np.nan])
    gta.optimize()
    gta.print_roi()
    gta.print_params()
    # Final fit with normalizations of nearby sources freed.
    gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
    gta.fit(covar=True)
    gta.print_roi()
    gta.print_params()
    gta.write_roi(args.roi_baseline, make_plots=args.make_plots)
def on_mismatch(self, pair):
    """Handle pairs that don't match the `match` and `exclude` filters.

    If --delete-unmatched is on, remove the local resource; otherwise just
    log that the pair was skipped.
    """
    local_entry = pair.local
    # Guard clause: without the option (or a local entry) we only log.
    if not (self.options.get("delete_unmatched") and local_entry):
        self._log_action("skip", "unmatched", "-", pair.any_entry, min_level=4)
        return
    self._log_action("delete", "unmatched", "<", local_entry)
    if local_entry.is_dir():
        self._remove_dir(local_entry)
    else:
        self._remove_file(local_entry)
def add_parametric_object_params(prepend=False, hide_private=True):
    """Add :class:`ParametricObject <cqparts.params.ParametricObject>` parameters
    in a list to the *docstring*.

    This is only intended to be used with *sphinx autodoc*.

    In your *sphinx* ``config.py`` file::

        from cqparts.utils.sphinx import add_parametric_object_params
        def setup(app):
            app.connect("autodoc-process-docstring", add_parametric_object_params())

    Then, when documenting your :class:`Part <cqparts.Part>` or
    :class:`Assembly <cqparts.Assembly>` the
    :class:`ParametricObject <cqparts.params.ParametricObject>` parameters
    will also be documented in the output.

    :param prepend: if True, parameters are added to the beginning of the *docstring*.
        otherwise, they're appended at the end.
    :type prepend: :class:`bool`
    :param hide_private: if True, parameters with a ``_`` prefix are not documented.
    :type hide_private: :class:`bool`
    """
    from ..params import ParametricObject

    def param_lines(app, obj):
        # Build the reST lines documenting each class parameter.
        params = obj.class_params(hidden=(not hide_private))
        doc_lines = []
        if params:
            # Only add a header if there is at least one parameter.
            doc_lines += [":class:`ParametricObject <cqparts.params.ParametricObject>` constructor parameters:", "", ]
        for (name, param) in sorted(params.items(), key=lambda x: x[0]):
            # Sorted by parameter name for a stable, readable listing.
            doc_lines.append(':param {name}: {doc}'.format(name=name, doc=param._param(), ))
            doc_lines.append(':type {name}: {doc}'.format(name=name, doc=param._type(), ))
        return doc_lines

    # Conditions for running the `param_lines` function above (in order):
    # must be a class, not the base ParametricObject itself, and a subclass
    # of ParametricObject (all conditions must be met).
    conditions = [
        lambda o: type(o) == type, lambda o: o is not ParametricObject, lambda o: issubclass(o, ParametricObject), ]

    def callback(app, what, name, obj, options, lines):
        # sphinx callback (this is what actually gets sent to the sphinx runtime)
        if all(c(obj) for c in conditions):
            new_lines = param_lines(app, obj)
            _add_lines(lines, new_lines, prepend=prepend)
    return callback
def get_target_list(self, scan_id):
    """Get a scan's target list."""
    # Each stored target entry is a (target, _, _) triple; keep the first field.
    return [target for target, _, _ in self.scans_table[scan_id]['targets']]
def get_x_objs(self, *dynac_type):
    """Return the lattice elements whose Dynac type matches any given type string.

    Multiple type strings can be given as separate arguments.  (Note: this
    returns the elements themselves, not their indices.)
    """
    matches = []
    for element in self.lattice:
        for wanted in dynac_type:
            if dynac_from_ele(element) == wanted:
                matches.append(element)
    return matches
def orbit(self, azim, elev):
    """Orbit the camera around the center position.

    Parameters
    ----------
    azim : float
        Horizontal rotation around the center point, in degrees.
    elev : float
        Vertical rotation, in degrees; the result is clamped to [-90, 90].
    """
    self.azimuth = self.azimuth + azim
    self.elevation = np.clip(self.elevation + elev, -90, 90)
    self.view_changed()
def cookies(self):
    """Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
    decoded. Use :meth:`get_cookie` if you expect signed cookies."""
    raw = self.environ.get('HTTP_COOKIE', '')
    morsels = SimpleCookie(raw).values()
    # Reject requests carrying an excessive number of cookies.
    if len(morsels) > self.MAX_PARAMS:
        raise HTTPError(413, 'Too many cookies')
    return FormsDict((m.key, m.value) for m in morsels)
def confd_state_loaded_data_models_data_model_exported_exported_to_all_exported_to_all(self, **kwargs):
    """Auto Generated Code"""
    # Build the confd-state monitoring XML tree down to the
    # exported/exported-to-all leaf for the named data model.
    config = ET.Element("config")
    confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring")
    loaded = ET.SubElement(confd_state, "loaded-data-models")
    model = ET.SubElement(loaded, "data-model")
    ET.SubElement(model, "name").text = kwargs.pop('name')
    exported = ET.SubElement(model, "exported")
    outer = ET.SubElement(exported, "exported-to-all")
    ET.SubElement(outer, "exported-to-all")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def hard_reset(self):
    """Reset the iterator to the beginning, discarding any roll-over data."""
    if self.seq is not None and self.shuffle:
        random.shuffle(self.seq)
    if self.imgrec is not None:
        self.imgrec.reset()
    self.cur = 0
    self._allow_read = True
    # Drop any cached batch left over from the previous pass.
    self._cache_data = self._cache_label = self._cache_idx = None
def date(self, year: Number, month: Number, day: Number) -> Date:
    """Build a ``Date`` whose year, month and day are the three numbers, in order."""
    return Date(year, month, day)
def is_reversible(self, reaction_id):
    """Whether the given reaction is reversible."""
    if reaction_id in self._reaction_set:
        # Delegate the actual reversibility lookup to the backing database.
        return self._database.is_reversible(reaction_id)
    raise ValueError('Reaction not in model: {}'.format(reaction_id))
def cmd_ssh_user(tar_aminame, inst_name):
    """Calculate the instance login-username based on its image name.

    Args:
        tar_aminame (str): name of the image the instance was created with.
        inst_name (str): name of the instance (used as a fallback when the
            AMI name is "Unknown").
    Returns:
        username (str): login name for ssh, derived from the AMI name;
            'ec2-user' when no known distro fragment matches.
    """
    if tar_aminame == "Unknown":
        tar_aminame = inst_name
    # Distro fragment (may appear anywhere in the AMI name) -> login user.
    userlu = {"ubunt": "ubuntu", "debia": "admin", "fedor": "root",
              "cento": "centos", "openb": "root"}
    lowered = tar_aminame.lower()
    # Replaces the original list/zip/dict juggling with a direct scan that
    # produces the same first-match-wins result.
    username = next((user for fragment, user in userlu.items()
                     if fragment in lowered), 'ec2-user')
    debg.dprint("loginuser Calculated: ", username)
    return username
def __undo_filter_paeth(self, scanline):
    """Undo the Paeth filter (PNG filter type 4), in place.

    For each byte the predictor is the one of left (a), above (b) and
    upper-left (c) closest to a + b - c; bytes before the first full pixel
    use the byte above.
    """
    # Index of the corresponding byte one pixel to the left; negative while
    # still inside the first pixel of the line (self.fu = bytes per pixel).
    ai = -self.fu
    previous = self.prev
    for i in range(len(scanline)):
        x = scanline[i]
        if ai < 0:
            # No left/upper-left neighbour yet: with a = c = 0 the Paeth
            # predictor reduces to the byte above.
            pr = previous[i]
        else:
            a = scanline[ai]
            c = previous[ai]
            b = previous[i]
            # Distances of a, b, c from the initial estimate p = a + b - c.
            pa = abs(b - c)
            pb = abs(a - c)
            pc = abs(a + b - c - c)
            # Tie-breaking order mandated by the PNG spec: a, then b, then c.
            if pa <= pb and pa <= pc:
                pr = a
            elif pb <= pc:
                pr = b
            else:
                pr = c
        # Reconstructed byte, modulo 256.
        scanline[i] = (x + pr) & 0xff
        ai += 1
def com_google_fonts_check_name_copyright_length(ttFont):
    """Length of copyright notice must not exceed 500 characters."""
    from fontbakery.utils import get_name_entries
    all_ok = True
    for entry in get_name_entries(ttFont, NameID.COPYRIGHT_NOTICE):
        text = entry.string.decode(entry.getEncoding())
        if len(text) > 500:
            all_ok = False
            yield FAIL, ("The length of the following copyright notice ({})" " exceeds 500 chars: '{}'" "").format(len(text), text)
    if all_ok:
        yield PASS, ("All copyright notice name entries on the" " 'name' table are shorter than 500 characters.")
def source_ports(self):
    """Source ports for this blacklist entry, or 'ANY' when no ports are set.

    :rtype: str
    """
    start = self.blacklist.get('BlacklistEntrySourcePort')
    if start is None:
        return 'ANY'
    end = self.blacklist.get('BlacklistEntrySourcePortRange')
    return '{}-{}'.format(start, end)
def chunk(self, count):
    """Yield the query results ``count`` rows at a time.

    :param count: The chunk size
    :type count: int
    :return: generator yielding each page of results
    :rtype: list
    """
    page = 1
    while True:
        results = self.for_page(page, count).get()
        if not results:
            break
        yield results
        page += 1
def get_by_hostname(self, hostname):
    """Get the enclosure whose active or standby OA preferred IP matches
    ``hostname``, or None when no enclosure matches."""
    def matches(enclosure):
        # Check key presence explicitly so a missing key never matches.
        return any(
            key in enclosure and enclosure[key] == hostname
            for key in ('activeOaPreferredIP', 'standbyOaPreferredIP')
        )
    for candidate in self.get_all():
        if matches(candidate):
            return self.new(self._connection, candidate)
    return None
def service_executions(self, name=None, pk=None, scope=None, service=None, **kwargs):
    """Retrieve Service Executions.

    Any additional ``keyword=value`` arguments are added to the request
    parameters; refer to the KE-chain API documentation for the available
    query parameters.

    :param name: (optional) name to limit the search for
    :type name: basestring or None
    :param pk: (optional) primary key or id (UUID) of the service to search for
    :type pk: basestring or None
    :param scope: (optional) id (UUID) of the scope to search in
    :type scope: basestring or None
    :param service: (optional) service UUID to filter on
    :type service: basestring or None
    :param kwargs: (optional) additional search keyword arguments
    :type kwargs: dict or None
    :return: list of :class:`models.ServiceExecution` objects
    :raises NotFoundError: When no `ServiceExecution` object is found
    """
    params = {'name': name, 'id': pk, 'service': service, 'scope': scope}
    if kwargs:
        params.update(**kwargs)
    response = self._request('GET', self._build_url('service_executions'), params=params)
    if response.status_code != requests.codes.ok:  # pragma: no cover
        raise NotFoundError("Could not retrieve service executions")
    return [ServiceExecution(execution, client=self) for execution in response.json()['results']]
def stream(self, sha):
    """For now, all lookup is done by git itself"""
    # git works with hex SHAs; convert in, then back out for the OStream.
    hexsha, typename, size, data_stream = self._git.stream_object_data(bin_to_hex(sha))
    return OStream(hex_to_bin(hexsha), typename, size, data_stream)
def _construct_values_list ( values ) :
"""This values _ list is a strange construction , because of ini format .
We need to extract the values with the following supported format :
> > > key = value0
. . . value1
. . . # comment line here
. . . value2
given that normally , either value0 is supplied , or ( value1 , value2 ) ,
but still allowing for all three at once .
Furthermore , with the configparser , we will get a list of values ,
and intermediate blank lines , but no comments . This means that we can ' t
merely use the count of values ' items to heuristically " skip ahead " lines ,
because we still have to manually parse through this .
Therefore , we construct the values _ list in the following fashion :
1 . Keep the first value ( in the example , this is ` value0 ` )
2 . For all other values , ignore blank lines .
Then , we can parse through , and look for values only .""" | lines = values . splitlines ( )
values_list = lines [ : 1 ]
values_list . extend ( filter ( None , lines [ 1 : ] ) )
return values_list |
def create_node(**kwargs):
    '''convenience function to make the rest api call for node creation.'''
    # Required arguments: raise KeyError early if any is missing.
    name = kwargs['name']
    size = kwargs['size']
    image = kwargs['image']
    location = kwargs['location']

    create_data = {
        'name': name,
        'package': size['name'],
        'image': image['name'],
    }

    networks = kwargs.get('networks')
    if networks is not None:
        create_data['networks'] = networks

    locality = kwargs.get('locality')
    if locality is not None:
        create_data['locality'] = locality

    # Metadata and tags are flattened into dotted keys for the API.
    metadata = kwargs.get('metadata')
    if metadata is not None:
        for meta_key, meta_value in six.iteritems(metadata):
            create_data['metadata.{0}'.format(meta_key)] = meta_value

    tag = kwargs.get('tag')
    if tag is not None:
        for tag_key, tag_value in six.iteritems(tag):
            create_data['tag.{0}'.format(tag_key)] = tag_value

    firewall_enabled = kwargs.get('firewall_enabled')
    if firewall_enabled is not None:
        create_data['firewall_enabled'] = firewall_enabled

    payload = salt.utils.json.dumps(create_data)
    ret = query(command='my/machines', data=payload, method='POST', location=location)
    if ret[0] in VALID_RESPONSE_CODES:
        return ret[1]

    log.error('Failed to create node %s: %s', name, ret[1])
    return {}
def _read_chunk_from_socket(socket):
    """(coroutine)
    Turn socket reading into coroutine.

    Returns a Future that resolves with the next chunk read from *socket*
    (up to 1024 bytes), with '' after a transient OSError, or fails with
    BrokenPipeError when the peer has closed the connection.
    """
    fd = socket.fileno()
    f = Future()

    def read_callback():
        # One-shot callback: unregister before reading so the event loop
        # never fires this twice for the same Future.
        get_event_loop().remove_reader(fd)
        # Read next chunk.
        try:
            data = socket.recv(1024)
        except OSError as e:
            # On OSX, when we try to create a new window by typing "pymux
            # new-window" in a certain pane, very often we get the following
            # error: "OSError: [Errno 9] Bad file descriptor."
            # This doesn't seem very harmful, and we can just try again.
            logger.warning('Got OSError while reading data from client: %s. '
                           'Trying again.', e)
            f.set_result('')
            return

        if data:
            f.set_result(data)
        else:
            # An empty read means end-of-stream: the client hung up.
            f.set_exception(BrokenPipeError)

    get_event_loop().add_reader(fd, read_callback)
    return f
def get_ra(self):
    """Get R.A. corresponding to the current position (ICRS, J2000)

    :return: Right Ascension
    """
    try:
        return self.ra.value
    except AttributeError:
        # No R.A. stored directly: transform from L, B to R.A., Dec
        # via the ICRS frame.
        icrs_coord = self.sky_coord.transform_to('icrs')
        return icrs_coord.ra.value
def add(self, value, timeout=None, force=False):
    """Append *value* to the queue, blocking while the queue is full.

    :param value: value added to the queue
    :param timeout: how long to wait for the queue to not be full
    :param force: add to the queue even if full (use only when a consumer
                  is returning work to the queue)
    :return: self (NOTE: the THREAD_STOP branch returns None instead)
    """
    with self.lock:
        if value is THREAD_STOP:
            # INSIDE THE lock SO THAT EXITING WILL RELEASE wait()
            # The stop sentinel bypasses the full/closed checks: it must
            # always be enqueued so blocked consumers wake up and exit.
            self.queue.append(value)
            self.closed.go()
            return
        if not force:
            # May block up to `timeout` waiting for space.
            self._wait_for_queue_space(timeout=timeout)
        if self.closed and not self.allow_add_after_close:
            Log.error("Do not add to closed queue")
        else:
            if self.unique:
                # Deduplicate: only append when not already enqueued.
                if value not in self.queue:
                    self.queue.append(value)
            else:
                self.queue.append(value)
    return self
def _get_error_response(self, exc):
    """Generate HttpResponse based on the HttpStatusCodeError."""
    # 401 gets a challenge response instead of a plain error body.
    if exc.has_code(codes.UNAUTHORIZED):
        return self._get_auth_challenge(exc)

    devil_logger = logging.getLogger('devil')
    log_message = 'devil caught http error: ' + str(exc)
    if exc.has_code(codes.INTERNAL_SERVER_ERROR):
        # Server-side faults get the traceback attached.
        devil_logger.error(log_message, exc_info=True)
    else:
        devil_logger.error(log_message)

    body = exc.content or ''
    return HttpResponse(content=body, status=exc.get_code_num())
def _create_index_content ( words ) :
"""Create html string of index file .
Parameters
words : list of str
List of cached words .
Returns
str
html string .""" | content = [ "<h1>Index</h1>" , "<ul>" ]
for word in words :
content . append ( '<li><a href="translations/{word}.html">{word}</a></li>' . format ( word = word ) )
content . append ( "</ul>" )
if not words :
content . append ( "<i>Nothing to see here ...yet!</i>" )
return "\n" . join ( content ) |
def reopen(self):
    """close and reopen the fits file with the same mode"""
    # Close and drop the old handle before constructing the new one so
    # the OS-level file handle is fully released first.
    self._FITS.close()
    del self._FITS
    # Reopen with the same filename and mode; trailing 0 disables creation
    # (the file must already exist) -- matches the wrapper's open flags.
    self._FITS = _fitsio_wrap.FITS(self._filename, self.intmode, 0)
    # Refresh the cached HDU list to reflect the freshly opened file.
    self.update_hdu_list()
def _get_permission(self, authorizer_name, authorizer_lambda_function_arn):
    """Constructs and returns the Lambda Permission resource allowing the Authorizer to invoke the function.

    :returns: the permission resource
    :rtype: model.lambda_.LambdaPermission
    """
    # Resolve the RestApi id at deploy time via a Fn::Sub placeholder.
    rest_api = ApiGatewayRestApi(self.logical_id,
                                 depends_on=self.depends_on,
                                 attributes=self.resource_attributes)
    execute_api_arn = ArnGenerator.generate_arn(
        partition=ArnGenerator.get_partition_name(),
        service='execute-api',
        resource='${__ApiId__}/authorizers/*')
    source_arn = fnSub(execute_api_arn,
                       {"__ApiId__": rest_api.get_runtime_attr('rest_api_id')})

    permission = LambdaPermission(
        self.logical_id + authorizer_name + 'AuthorizerPermission',
        attributes=self.passthrough_resource_attributes)
    permission.Action = 'lambda:invokeFunction'
    permission.FunctionName = authorizer_lambda_function_arn
    permission.Principal = 'apigateway.amazonaws.com'
    permission.SourceArn = source_arn
    return permission
def env_resolver(cls, var_name: str, env_name: str = None, default: Any = _NONE) -> 'Configuration':
    """Method for configuring environment resolver.

    :param var_name: Variable name
    :param env_name: An optional environment variable name. If not set haps
        looks for `HAPS_var_name`
    :param default: Default value for variable. If it's a callable, it is
        called before return. If not provided
        :class:`~haps.exceptions.UnknownConfigVariable` is raised
    :return: :class:`~haps.config.Configuration` instance for easy chaining
    """
    # Bind the lookup parameters into the generic env resolver and register
    # it under var_name.
    resolver_fn = partial(_env_resolver, var_name=var_name,
                          env_name=env_name, default=default)
    cls.resolver(var_name)(resolver_fn)
    return cls()
def init(directory=None):
    """Initializes a Gitpress presentation repository at the specified directory.

    :param directory: target directory (``None`` uses the default location)
    :return: the path of the newly created repository
    :raises RepositoryAlreadyExistsError: if a repository already exists there
    """
    repo = repo_path(directory)
    if os.path.isdir(repo):
        raise RepositoryAlreadyExistsError(directory, repo)

    # Initialize repository with default template
    shutil.copytree(default_template_path, repo)

    # List-form subprocess arguments are passed verbatim (no shell), so the
    # message must not carry its own quoting; the original embedded literal
    # double quotes into the commit message.
    message = 'Default presentation content.'
    subprocess.call(['git', 'init', '-q', repo])
    subprocess.call(['git', 'add', '.'], cwd=repo)
    subprocess.call(['git', 'commit', '-q', '-m', message], cwd=repo)
    return repo
def max(self, value):
    """The max property.

    ``self._values`` only holds explicit non-default overrides: setting the
    value equal to the stored default removes the entry instead of storing it.

    Args:
        value (float): the property value.
    """
    if value == self._defaults['max']:
        # Drop any explicit override; an absent key means "use the default".
        # (The original stored the default redundantly when no override was
        # present, breaking the sparse-overrides invariant.)
        self._values.pop('max', None)
    else:
        self._values['max'] = value
def subsets_changed(last_observed_subsets, subsets):
    """Report whether two lists of V1EndpointSubset differ.

    The comparison appears to assume at most one subset with a single
    address is ever published (the doctests only cover that shape);
    anything outside it is reported as changed.

    >>> Kubernetes.subsets_changed([], [])
    False
    >>> Kubernetes.subsets_changed([], [k8s_client.V1EndpointSubset()])
    True
    >>> s1 = [k8s_client.V1EndpointSubset(addresses=[k8s_client.V1EndpointAddress(ip='1.2.3.4')])]
    >>> s2 = [k8s_client.V1EndpointSubset(addresses=[k8s_client.V1EndpointAddress(ip='1.2.3.5')])]
    >>> Kubernetes.subsets_changed(s1, s2)
    True
    >>> a = [k8s_client.V1EndpointAddress(ip='1.2.3.4')]
    >>> s1 = [k8s_client.V1EndpointSubset(addresses=a, ports=[k8s_client.V1EndpointPort(protocol='TCP', port=1)])]
    >>> s2 = [k8s_client.V1EndpointSubset(addresses=a, ports=[k8s_client.V1EndpointPort(port=5432)])]
    >>> Kubernetes.subsets_changed(s1, s2)
    True
    >>> p1 = k8s_client.V1EndpointPort(name='port1', port=1)
    >>> p2 = k8s_client.V1EndpointPort(name='port2', port=2)
    >>> p3 = k8s_client.V1EndpointPort(name='port3', port=3)
    >>> s1 = [k8s_client.V1EndpointSubset(addresses=a, ports=[p1, p2])]
    >>> s2 = [k8s_client.V1EndpointSubset(addresses=a, ports=[p2, p3])]
    >>> Kubernetes.subsets_changed(s1, s2)
    True
    >>> s2 = [k8s_client.V1EndpointSubset(addresses=a, ports=[p2, p1])]
    >>> Kubernetes.subsets_changed(s1, s2)
    False
    """
    # Different number of subsets is always a change.
    if len(last_observed_subsets) != len(subsets):
        return True
    if subsets == []:
        return False
    # Changed when: the observed subset does not have exactly one address,
    # the single address IP differs, or the port counts differ.
    if len(last_observed_subsets[0].addresses or []) != 1 or last_observed_subsets[0].addresses[0].ip != subsets[0].addresses[0].ip or len(last_observed_subsets[0].ports) != len(subsets[0].ports):
        return True
    if len(subsets[0].ports) == 1:
        # Single port: direct field-by-field comparison.
        return not Kubernetes.compare_ports(last_observed_subsets[0].ports[0], subsets[0].ports[0])
    # Multiple ports: match them up by name, order-insensitively.
    observed_ports = {p.name: p for p in last_observed_subsets[0].ports}
    for p in subsets[0].ports:
        if p.name not in observed_ports or not Kubernetes.compare_ports(p, observed_ports.pop(p.name)):
            return True
    return False
def unique_iter(seq):
    """Return the items of *seq* as a list, in order, with duplicates removed.

    See http://www.peterbe.com/plog/uniqifiers-benchmark
    Originally f8 written by Dave Kirby
    """
    seen = set()
    result = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
def gen_headers() -> Dict[str, str]:
    """Generate a header pairing.

    Picks a User-Agent at random from the pool (currently a single entry)
    and returns it as a requests-style headers mapping.

    :return: dict with a single 'User-Agent' key
    """
    ua_list: List[str] = [
        'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 '
        '(KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36'
    ]
    # random.choice is the idiomatic (and off-by-one-proof) replacement for
    # indexing with random.randint(0, len(ua_list) - 1).
    headers: Dict[str, str] = {'User-Agent': random.choice(ua_list)}
    return headers
def save_model(self, request, obj, form, change):
    """Sends a tweet with the title/short_url if applicable."""
    super(TweetableAdminMixin, self).save_model(request, obj, form, change)
    if Api and request.POST.get("send_tweet", False):
        auth_settings = get_auth_settings()
        obj.set_short_url()
        # Leave room for the short URL plus a separating space in 140 chars.
        room = 140 - len(obj.short_url) - 1
        tweet_text = truncatechars(obj, room)
        twitter_api = Api(*auth_settings)
        twitter_api.PostUpdate("%s %s" % (tweet_text, obj.short_url))
def do_blacklist(config, config_dir):
    """CLI action "run editor for blacklist".

    Opens the configuration's blacklist file in $EDITOR; exits with status 1
    if the configuration directory does not exist.
    """
    if not os.path.exists(config_dir):
        # print() function: the original used Python 2 print statements,
        # which are syntax errors under Python 3 (the rest of the file uses
        # Python 3 annotations).
        print("Configuration '{}' does not exist.".format(config))
        exit(1)
    editor = os.environ["EDITOR"]
    config_blacklist = os.path.join(config_dir, 'blacklist')
    call([editor, config_blacklist])
    print("Blacklist configuration has been updated.")
def get_returner_options(virtualname=None, ret=None, attrs=None, **kwargs):
    '''Get the returner options from salt.

    :param str virtualname: The returner virtualname (as returned
        by __virtual__())
    :param ret: result of the module that ran. dict-like object
        May contain a `ret_config` key pointing to a string.
        If a `ret_config` is specified, config options are read from::

            value.virtualname.option

        If not, config options are read from::

            value.virtualname.option

    :param attrs: options the returner wants to read
    :param __opts__: Optional dict-like object that contains a fallback config
        in case the param `__salt__` is not supplied. Defaults to empty dict.
    :param __salt__: Optional dict-like object that exposes the salt API.
        Defaults to empty dict.

        a) if __salt__ contains a 'config.option' configuration option,
           we infer the returner is being called from a state or module run ->
           config is a copy of the `config.option` function
        b) if __salt__ was not available, we infer that the returner is being
           called from the Salt scheduler, so we look for the configuration
           options in the param `__opts__` -> cfg is a copy of the __opts__
           dictionary

    :param str profile_attr: Optional. If supplied, an overriding config
        profile is read from the corresponding key of `__salt__`.
    :param dict profile_attrs: Optional

        .. fixme:: only keys are read

        For each key in profile_attrs, values are used to fetch a value
        pointed by 'virtualname.%key' in the dict found thanks to the
        param `profile_attr`.
    '''
    ret_config = _fetch_ret_config(ret)

    attrs = attrs or {}
    profile_attr = kwargs.get('profile_attr', None)
    profile_attrs = kwargs.get('profile_attrs', None)
    defaults = kwargs.get('defaults', None)
    __salt__ = kwargs.get('__salt__', {})
    __opts__ = kwargs.get('__opts__', {})

    # select the config source: prefer the live 'config.option' function
    # (state/module run), fall back to the raw opts dict (scheduler).
    cfg = __salt__.get('config.option', __opts__)

    # browse the config for relevant options, store them in a dict
    _options = dict(_options_browser(cfg, ret_config, defaults, virtualname, attrs,))

    # override some values with relevant profile options
    _options.update(_fetch_profile_opts(cfg, virtualname, __salt__, _options, profile_attr, profile_attrs))

    # override some values with relevant options from
    # keyword arguments passed via return_kwargs
    if ret and 'ret_kwargs' in ret:
        _options.update(ret['ret_kwargs'])

    return _options
def to_bigquery_ddl(self, name_case=DdlParseBase.NAME_CASE.original):
    """Generate BigQuery CREATE TABLE statements

    :param name_case: name case type
        * DdlParse.NAME_CASE.original : Return to no convert
        * DdlParse.NAME_CASE.lower : Return to lower
        * DdlParse.NAME_CASE.upper : Return to upper

    :return: BigQuery CREATE TABLE statements
    """
    # Dataset name comes from the source schema, case-converted as requested;
    # "dataset" is the placeholder when no schema was parsed.
    if self.schema is None:
        dataset = "dataset"
    elif name_case == self.NAME_CASE.lower:
        dataset = self.schema.lower()
    elif name_case == self.NAME_CASE.upper:
        dataset = self.schema.upper()
    else:
        dataset = self.schema

    cols_defs = []
    for col in self.columns.values():
        col_name = col.get_name(name_case)

        if col.array_dimensional < 1:
            # no array data type
            type = col.bigquery_standard_data_type  # NOTE: shadows builtin 'type'
            not_null = " NOT NULL" if col.not_null else ""
        else:
            # one or multiple dimensional array data type: wrap the base type
            # in nested ARRAY<STRUCT<...>> layers, one per extra dimension.
            type_front = "ARRAY<"
            type_back = ">"
            for i in range(1, col.array_dimensional):
                type_front += "STRUCT<dimension_{} ARRAY<".format(i)
                type_back += ">>"
            type = "{}{}{}".format(type_front, col.bigquery_standard_data_type, type_back)
            # BigQuery ARRAY columns cannot be declared NOT NULL.
            not_null = ""

        cols_defs.append("{name} {type}{not_null}".format(
            name=col_name,
            type=type,
            not_null=not_null,
        ))

    return textwrap.dedent(
        """\
        #standardSQL
        CREATE TABLE `project.{dataset}.{table}`
        (
          {colmns_define}
        )""").format(
        dataset=dataset,
        table=self.get_name(name_case),
        colmns_define=",\n  ".join(cols_defs),
    )
def check_ip(ip, log=False):
    """Attempts a connection to the TV and checks if there really is a TV.

    :param ip: IP address to probe
    :param log: when True, print progress to stdout
    :return: True if the TV's HTTP endpoint answered with status 200
    """
    if log:
        print('Checking ip: {}...'.format(ip))
    request_timeout = 0.1
    try:
        tv_url = 'http://{}:6095/request?action=isalive'.format(ip)
        request = requests.get(tv_url, timeout=request_timeout)
    except requests.exceptions.RequestException:
        # Catch every requests failure mode (connection refused, DNS error,
        # read timeout, ...), not only ConnectTimeout: any of them means
        # "no TV here". The original let the other exceptions propagate.
        return False
    return request.status_code == 200
def delete(self, version_name):
    """Delete a version of model.

    Args:
        version_name: the name of the version in short form, such as "v1".
    """
    full_name = '%s/versions/%s' % (self._full_model_name, version_name)
    operation = self._api.projects().models().versions().delete(name=full_name).execute()
    if 'name' not in operation:
        raise Exception('Invalid response from service. "name" is not found.')
    # Deletion is asynchronous; block until the long-running operation ends.
    _util.wait_for_long_running_operation(operation['name'])
def generate_pl_dataset(n, m, useDirichlet=True):
    """Generate a Plackett-Luce dataset and return the parameters and votes.

    :param n: number of votes to generate
    :param m: number of alternatives
    :param useDirichlet: boolean flag to use the Dirichlet distribution
    :return: tuple (gamma, votes)
    """
    if useDirichlet:
        gamma = np.random.dirichlet(np.ones(m))
    else:
        gamma = np.random.rand(m)
        # normalize sum to 1.0 (not needed for Dirichlet)
        gamma /= np.sum(gamma)

    # generate one vote per agent
    votes = [draw_pl_vote(m, gamma) for _ in range(n)]
    return (gamma, votes)
def validate_config(self, organization, config, actor=None):
    """Validate the plugin config, resolving the repo's external id.

    Example of raising on an invalid combination::

        if config['foo'] and not config['bar']:
            raise PluginError('You cannot configure foo with bar')
        return config
    """
    repo_name = config.get('name')
    if repo_name:
        client = self.get_client(actor)
        try:
            repo = client.get_repo(repo_name)
        except Exception as e:
            self.raise_error(e)
        else:
            # Store the provider-side id so later lookups don't need the name.
            config['external_id'] = six.text_type(repo['id'])
    return config
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.