signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def remove_content_history_in_cloud(self, page_id, version_id):
    """Remove a single version from a page's content history (Cloud only).

    :param page_id: ID of the page whose history is trimmed
    :param version_id: version number to remove
    :return: None
    """
    endpoint = 'rest/api/content/{id}/version/{versionId}'
    self.delete(endpoint.format(id=page_id, versionId=version_id))
|
def record_config(self, rec_opt):
    """Set recorder configuration options.

    rec_opt is the record options string; examples of parameters:

    | ParamName                | Value   | Description                          |
    | Record[ch].PreRecord     | Integer | Range is [0-300]. Prerecord seconds, |
    |                          |         | 0 no prerecord. ch (channel) from 0. |
    | Record[ch].              |         | wd (week day) range is [0-6]         |
    | TimeSection[wd][ts]      | string  | (Sun/Sat); ts (time section) range   |
    |                          |         | is [0-23], time section table index. |
    |                          |         | Format: mask hh:mm:ss-hh:mm:ss       |
    |                          |         | Mask: [0-65535], hh: [0-24],         |
    |                          |         | mm: [0-59], ss: [0-59]               |
    |                          |         | Mask indicates record type by bits:  |
    |                          |         | Bit0: regular record                 |
    |                          |         | Bit1: motion detection record        |
    |                          |         | Bit2: alarm record                   |
    |                          |         | Bit3: card record                    |

    Example:
        Record[0].TimeSection[0][0]=6 00:00:00-23:59:59

    rec_opt format:
        <paramName>=<paramValue>[&<paramName>=<paramValue>...]
    """
    response = self.command('configManager.cgi?action=setConfig&{0}'.format(rec_opt))
    return response.content.decode('utf-8')
|
def netmiko_commands(*commands, **kwargs):
    '''.. versionadded:: 2019.2.0

    Invoke one or more commands to be executed on the remote device, via Netmiko.
    Returns a list of strings, with the output from each command.

    commands
        A list of commands to be executed.
    expect_string
        Regular expression pattern to use for determining end of output.
        If left blank will default to being based on router prompt.
    delay_factor: ``1``
        Multiplying factor used to adjust delays (default: ``1``).
    max_loops: ``500``
        Controls wait time in conjunction with delay_factor. Will default to be
        based upon self.timeout.
    auto_find_prompt: ``True``
        Whether it should try to auto-detect the prompt (default: ``True``).
    strip_prompt: ``True``
        Remove the trailing router prompt from the output (default: ``True``).
    strip_command: ``True``
        Remove the echo of the command from the output (default: ``True``).
    normalize: ``True``
        Ensure the proper enter is sent at end of command (default: ``True``).
    use_textfsm: ``False``
        Process command output through TextFSM template (default: ``False``).

    CLI Example:

    .. code-block:: bash

        salt '*' napalm.netmiko_commands 'show version' 'show interfaces'
    '''
    conn = netmiko_conn(**kwargs)
    return [conn.send_command(cmd) for cmd in commands]
|
def _load_obj ( name ) :
"""Import Python object from its name as string .
Parameters
name : str
Object name to import ( e . g . pandas . Series . str . upper )
Returns
object
Python object that can be a class , method , function . . .
Examples
> > > Docstring . _ load _ obj ( ' pandas . Series ' )
< class ' pandas . core . series . Series ' >"""
|
for maxsplit in range ( 1 , name . count ( '.' ) + 1 ) : # TODO when py3 only replace by : module , * func _ parts = . . .
func_name_split = name . rsplit ( '.' , maxsplit )
module = func_name_split [ 0 ]
func_parts = func_name_split [ 1 : ]
try :
obj = importlib . import_module ( module )
except ImportError :
pass
else :
continue
if 'obj' not in locals ( ) :
raise ImportError ( 'No module can be imported ' 'from "{}"' . format ( name ) )
for part in func_parts :
obj = getattr ( obj , part )
return obj
|
def destroy(self, blocking=False):
    """Destroy all data and metadata related to this broadcast variable.

    Use this with caution; once a broadcast variable has been destroyed,
    it cannot be used again.

    .. versionchanged:: 3.0.0
        Added optional argument `blocking` to specify whether to block until all
        blocks are deleted.

    :param blocking: passed through to the JVM-side destroy call
    :raises Exception: when ``_jbroadcast`` is None -- presumably only the
        driver holds the JVM handle, so this guards against executor-side
        calls (TODO confirm against the enclosing class)
    """
    if self._jbroadcast is None:
        raise Exception("Broadcast can only be destroyed in driver")
    # Destroy the JVM-side broadcast, then remove the local backing file.
    self._jbroadcast.destroy(blocking)
    os.unlink(self._path)
|
def _walk_polyline(tid, intersect, T, mesh, plane, dist_tol):
    """Given an intersection, walk through the mesh triangles, computing
    intersection with the cut plane for each visited triangle and adding
    those intersection to a polyline.

    :param tid: id of the triangle the walk starts from
    :param intersect: current intersection; ``intersect[1]`` is the point
        appended to the polyline
    :param T: iterable of triangle ids still to explore (copied to a set
        and consumed as the walk proceeds)
    :param mesh: the mesh being cut
    :param plane: the cutting plane
    :param dist_tol: distance tolerance used to match intersection points
    :return: tuple ``(p, T)`` -- the polyline points and the remaining
        unexplored triangle ids
    """
    T = set(T)
    p = []
    # Loop until we have explored all the triangles for the current
    # polyline
    while True:
        p.append(intersect[1])
        tid, intersections, T = get_next_triangle(mesh, T, plane, intersect, dist_tol)
        if tid is None:
            break
        # get_next_triangle returns triangles that our plane actually
        # intersects (as opposed to touching only a single vertex),
        # hence the assert
        assert len(intersections) == 2
        # Of the two returned intersections, one should have the
        # intersection point equal to p[-1]; the other one is the exit
        # point used to continue the walk.
        if la.norm(intersections[0][1] - p[-1]) < dist_tol:
            intersect = intersections[1]
        else:
            assert la.norm(intersections[1][1] - p[-1]) < dist_tol, '%s not close to %s' % (str(p[-1]), str(intersections))
            intersect = intersections[0]
    return p, T
|
def exclude(*what):
    """Blacklist *what*.

    :param what: What to blacklist.
    :type what: :class:`list` of classes or :class:`attr.Attribute`\\ s.
    :rtype: :class:`callable`
    """
    classes, attributes = _split_what(what)

    def exclude_(attribute, value):
        # Keep the pair only if neither its type nor its attribute is listed.
        return value.__class__ not in classes and attribute not in attributes

    return exclude_
|
def _get_column_and_bindparam(left, right, operator):
    """Return left and right expressions in (Column, BindParameter) order.

    :param left: one side of a SQLAlchemy binary expression
    :param right: the other side of the expression
    :param operator: operator name, used only in error messages
    :raises AssertionError: if the pair cannot be arranged as
        (Column, BindParameter)
    """
    # Swap so the Column (if any) ends up on the left.
    if not isinstance(left, Column):
        left, right = right, left
    if not isinstance(left, Column):
        raise AssertionError(
            u'SQLAlchemy operator {} expects Column as the left side of the '
            u'expression, got {} of type {} instead.'.format(operator, left, type(left)))
    if not isinstance(right, BindParameter):
        raise AssertionError(
            u'SQLAlchemy operator {} expects BindParameter as the right side of the expression, '
            u'got {} of type {} instead.'.format(operator, right, type(right)))
    return left, right
|
def _setup(self):
    """Set the binning info we need from the `edges`."""
    # Per-dimension record: 1/width factor, min/max edge, bin count, spacing tag.
    dtype = [('inv', 'f8'), ('min', 'f8'), ('max', 'f8'), ('N', 'i4'), ('spacing', 'object')]
    dtype = numpy.dtype(dtype)
    self._info = numpy.empty(self.Ndim, dtype=dtype)
    # NOTE: these are *views* into self._info -- assigning through
    # self.N[i], self.min[i], ... below updates the structured array in place.
    self.min = self._info['min']
    self.max = self._info['max']
    self.N = self._info['N']
    self.inv = self._info['inv']
    self.spacing = self._info['spacing']
    for i, dim in enumerate(self.dims):
        self.N[i] = len(self.edges[i]) - 1
        self.min[i] = self.edges[i][0]
        self.max[i] = self.edges[i][-1]
        # determine the type of spacing
        self.spacing[i] = None
        lin_diff = numpy.diff(self.edges[i])
        with numpy.errstate(divide='ignore', invalid='ignore'):
            # log10 of non-positive edges yields nan/-inf; errors are silenced
            # so such axes simply fail the logspace allclose test below.
            log_diff = numpy.diff(numpy.log10(self.edges[i]))
        if numpy.allclose(lin_diff, lin_diff[0]):
            self.spacing[i] = 'linspace'
            self.inv[i] = self.N[i] * 1.0 / (self.max[i] - self.min[i])
        elif numpy.allclose(log_diff, log_diff[0]):
            self.spacing[i] = 'logspace'
            self.inv[i] = self.N[i] * 1.0 / numpy.log10(self.max[i] / self.min[i])
    # Two extra slots per dimension -- presumably under/overflow bins; confirm
    # against the code that fills the histogram.
    self.shape = self.N + 2
    # store Rmax
    self.Rmax = self.max[0]
|
def e_164(msisdn: str) -> str:
    """Return *msisdn* formatted in E.164 international format."""
    # The phonenumbers library requires the leading "+" to identify the
    # country, so normalize the input to always carry exactly one.
    normalized = "+{}".format(msisdn.lstrip("+"))
    number = phonenumbers.parse(normalized, None)
    return phonenumbers.format_number(number, phonenumbers.PhoneNumberFormat.E164)
|
def _link(self, *args, **kwargs):
    """Return a link, potentially pre-redirected."""
    redirect = self._record.redirect_url
    if not redirect:
        return self._permalink(*args, **kwargs)
    return links.resolve(redirect, self.search_path, kwargs.get('absolute'))
|
def _get_platform_name(ncattr):
    """Determine the name of the platform from a ``G-<number>`` attribute.

    Returns the mapped spacecraft name, or None when the attribute does
    not match (or the number is unknown to SPACECRAFTS).
    """
    match = re.match(r'G-(\d+)', ncattr)
    return SPACECRAFTS.get(int(match.group(1))) if match else None
|
def _verify_jws(self, payload, key):
    """Verify the given JWS payload with the given key and return the payload."""
    jws = JWS.from_compact(payload)
    try:
        alg = jws.signature.combined.alg.name
    except KeyError:
        raise SuspiciousOperation('No alg value found in header')
    if alg != self.OIDC_RP_SIGN_ALGO:
        raise SuspiciousOperation(
            "The provider algorithm {!r} does not match the client's "
            "OIDC_RP_SIGN_ALGO.".format(alg))
    if isinstance(key, six.string_types):
        # Use smart_bytes here since the key string comes from settings.
        jwk = JWK.load(smart_bytes(key))
    else:
        # The key is a json returned from the IDP JWKS endpoint.
        jwk = JWK.from_json(key)
    if not jws.verify(jwk):
        raise SuspiciousOperation('JWS token verification failed.')
    return jws.payload
|
def _expand_endpoint_name ( endpoint_name , flags ) :
"""Populate any ` ` { endpoint _ name } ` ` tags in the flag names for the given
handler , based on the handlers module / file name ."""
|
return tuple ( flag . format ( endpoint_name = endpoint_name ) for flag in flags )
|
def _create_dot(self, vd, parent, rock_ridge, xa, file_mode):
    # type: (headervd.PrimaryOrSupplementaryVD, dr.DirectoryRecord, str, bool, int) -> None
    '''An internal method to create a new 'dot' Directory Record.

    Parameters:
     vd - The volume descriptor to attach the 'dot' Directory Record to.
     parent - The parent Directory Record for new Directory Record.
     rock_ridge - The Rock Ridge version to use for this entry (if any).
     xa - Whether this Directory Record should have extended attributes.
     file_mode - The mode to assign to the dot directory (only applies to Rock Ridge).
    Returns:
     Nothing.
    '''
    block_size = vd.logical_block_size()
    dot = dr.DirectoryRecord()
    dot.new_dot(vd, parent, vd.sequence_number(), rock_ridge, block_size, xa, file_mode)
    self._add_child_to_dr(dot, block_size)
|
def visited(self):
    """Called just after this node has been visited (with or
    without a build)."""
    try:
        # Probe for build info; the bound value itself is unused -- the
        # attribute access only detects whether this node carries binfo.
        binfo = self.binfo
    except AttributeError:
        # Apparently this node doesn't need build info, so
        # don't bother calculating or storing it.
        pass
    else:
        self.ninfo.update(self)
        SCons.Node.store_info_map[self.store_info](self)
|
def apply_driver_hacks(self, app, info, options):
    """Set custom SQLAlchemy engine options:

    - Teach it to encode and decode our node objects
    - Enable pre-ping (i.e., test the DB connection before trying to use it)
    """
    options['json_serializer'] = lambda data: json.dumps(data, default=encode_node)
    options['json_deserializer'] = lambda data: json.loads(data, object_hook=decode_node)
    options['pool_pre_ping'] = True
    super(QuiltSQLAlchemy, self).apply_driver_hacks(app, info, options)
|
def adj_par_names(self):
    """Wrapper around pyemu.Pst.adj_par_names for the list of adjustable
    parameter names.

    Returns
    -------
    adj_par_names : list
        pyemu.Pst.adj_par_names (falls back to the jco parameter names
        when no Pst is attached)
    """
    if self.__pst is None:
        return self.jco.par_names
    return self.pst.adj_par_names
|
def cancel_block_volume(self, volume_id, reason='No longer needed', immediate=False):
    """Cancels the given block storage volume.

    :param integer volume_id: The volume ID
    :param string reason: The reason for cancellation
    :param boolean immediate: Cancel immediately or on anniversary date
    """
    mask = 'mask[id,billingItem[id,hourlyFlag]]'
    block_volume = self.get_block_volume_details(volume_id, mask=mask)
    if 'billingItem' not in block_volume:
        raise exceptions.SoftLayerError("Block Storage was already cancelled")
    # Hourly billed volumes can only be cancelled immediately.
    if utils.lookup(block_volume, 'billingItem', 'hourlyFlag'):
        immediate = True
    billing_item_id = block_volume['billingItem']['id']
    return self.client['Billing_Item'].cancelItem(immediate, True, reason, id=billing_item_id)
|
def filter_spent_outputs(self, outputs):
    """Remove outputs that have been spent.

    Args:
        outputs: list of TransactionLink
    """
    links = [output.to_dict() for output in outputs]
    spent = set()
    for tx in query.get_spending_transactions(self.connection, links):
        for input_ in tx['inputs']:
            spent.add(TransactionLink.from_dict(input_['fulfills']))
    return [output for output in outputs if output not in spent]
|
def dropEvent(self, event):
    """Processes the drag drop event using the filter set by the
    setDragDropFilter.

    :param event: <QDropEvent>
    """
    handler = self.dragDropFilter()
    if handler:
        handler(self, event)
    else:
        # No filter installed: fall back to the default Qt behavior.
        super(XCalendarWidget, self).dropEvent(event)
|
def load_file(filename, out=sys.stdout):
    """Load a Python source file and compile it to byte-code.

    _load_file(filename: string): code_object

    :param filename: name of file containing Python source code
        (normally a .py)
    :param out: stream syntax-error diagnostics are written to
    :return: code_object compiled from this source code
    :raises SyntaxError: re-raised after reporting to ``out``

    This function does NOT write any file!
    """
    # `with` guarantees the file is closed even if read() raises,
    # replacing the previous manual try/finally bookkeeping.
    with open(filename, 'rb') as fp:
        source = fp.read()
    try:
        if PYTHON_VERSION < 2.6:
            co = compile(source, filename, 'exec')
        else:
            # dont_inherit: do not let our own __future__ flags leak into
            # the compiled module.
            co = compile(source, filename, 'exec', dont_inherit=True)
    except SyntaxError:
        out.write('>>Syntax error in %s\n' % filename)
        raise
    return co
|
def execute_plan(plan=None):
    """Create, Modify or Delete, depending on plan item.

    :param plan: iterable of task dicts (each with an 'action' key of
        'delete', 'add' or 'update'); None or empty is treated as no work
    :return: list of dicts pairing each executed task with its command output
    """
    execution_result = list()
    # Guard: the default is None, which is not iterable; treat it as an
    # empty plan instead of raising TypeError.
    if not plan:
        return execution_result
    for task in plan:
        action = task['action']
        if action == 'delete':
            command = generate_delete_user_command(username=task.get('username'), manage_home=task['manage_home'])
            command_output = execute_command(command)
            execution_result.append(dict(task=task, command_output=command_output))
            remove_sudoers_entry(username=task.get('username'))
        elif action == 'add':
            command = generate_add_user_command(proposed_user=task.get('proposed_user'), manage_home=task['manage_home'])
            command_output = execute_command(command)
            if task['proposed_user'].public_keys and task['manage_home'] and task['manage_keys']:
                write_authorized_keys(task['proposed_user'])
            if task['proposed_user'].sudoers_entry:
                write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['proposed_user'].sudoers_entry)
            execution_result.append(dict(task=task, command_output=command_output))
        elif action == 'update':
            result = task['user_comparison'].get('result')
            # Don't modify user if only keys have changed
            action_count = 0
            for k, _ in iteritems(result):
                if '_action' in k:
                    action_count += 1
            command_output = None
            if task['manage_home'] and task['manage_keys'] and action_count == 1 and 'public_keys_action' in result:
                write_authorized_keys(task['proposed_user'])
            elif action_count == 1 and 'sudoers_entry_action' in result:
                write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry'])
            else:
                command = generate_modify_user_command(task=task)
                command_output = execute_command(command)
                if task['manage_home'] and task['manage_keys'] and result.get('public_keys_action'):
                    write_authorized_keys(task['proposed_user'])
                if result.get('sudoers_entry_action'):
                    write_sudoers_entry(username=task['proposed_user'].name, sudoers_entry=task['user_comparison']['result']['replacement_sudoers_entry'])
            execution_result.append(dict(task=task, command_output=command_output))
    # BUG FIX: the result list was previously built but never returned.
    return execution_result
|
def _run_openstack_cmds(self, commands, commands_to_log=None, sync=False):
    """Execute/send a CAPI (Command API) command to EOS.

    In this method, the list of commands is wrapped with prefix and
    postfix commands to make it understandable by EOS.

    :param commands: List of commands to be executed on EOS.
    :param commands_to_log: This should be set to the command that is
                            logged. If it is None, then the commands
                            param is logged.
    :param sync: This flag indicates that the region is being synced.
    """
    full_command = self._build_command(commands, sync=sync)
    full_log_command = (
        self._build_command(commands_to_log, sync=sync)
        if commands_to_log else None
    )
    return self._run_eos_cmds(full_command, full_log_command)
|
def command_repo_enable(self):
    """Repositories enable/disable."""
    args = self.args
    if len(args) == 1 and args[0] == "repo-enable":
        RepoEnable().choose()
    else:
        usage("")
|
def get_permissions_query(session, identifier_s):
    """Build the aggregated permissions query for the given identifiers.

    :type identifier_s: list
    """
    # SQLAlchemy needs "== None" (not "is None") to emit IS NULL here;
    # a NULL name means the permission applies to every value ('*').
    thedomain = case([(Domain.name == None, '*')], else_=Domain.name)
    theaction = case([(Action.name == None, '*')], else_=Action.name)
    theresource = case([(Resource.name == None, '*')], else_=Resource.name)
    action_agg = func.group_concat(theaction.distinct())
    resource_agg = func.group_concat(theresource.distinct())
    permissions = (
        session.query(thedomain + ':' + action_agg + ':' + resource_agg)
        .select_from(User)
        .join(role_membership, User.pk_id == role_membership.c.user_id)
        .join(role_permission, role_membership.c.role_id == role_permission.c.role_id)
        .join(Permission, role_permission.c.permission_id == Permission.pk_id)
        .outerjoin(Domain, Permission.domain_id == Domain.pk_id)
        .outerjoin(Action, Permission.action_id == Action.pk_id)
        .outerjoin(Resource, Permission.resource_id == Resource.pk_id)
        .filter(User.identifier.in_(identifier_s))
        .group_by(Permission.domain_id, Permission.resource_id)
    )
    return permissions
|
async def get_buttons(self):
    """Returns `buttons`, but will make an API call to find the
    input chat (needed for the buttons) unless it's already cached.
    """
    if not self.buttons and self.reply_markup:
        chat = await self.get_input_chat()
        if not chat:
            # No input chat could be found: buttons cannot be resolved.
            # Note this path returns None, not self._buttons.
            return
        try:
            bot = self._needed_markup_bot()
        except ValueError:
            # The needed bot was not in the cached entities; refetch the
            # message and retry once.
            await self._reload_message()
            bot = self._needed_markup_bot()
        # TODO use via_input_bot
        self._set_buttons(chat, bot)
    return self._buttons
|
def _apply(self, input_string):
    """Apply command."""
    e = self.editor
    # Parse command.
    m = COMMAND_GRAMMAR.match(input_string)
    if m is None:
        return
    variables = m.variables()
    command = variables.get('command')
    set_option = variables.get('set_option')
    # Preview colorschemes.
    if command == 'colorscheme':
        colorscheme = variables.get('colorscheme')
        if colorscheme:
            e.use_colorscheme(colorscheme)
    # Preview some set commands.
    if command == 'set':
        # Map each boolean option alias to the editor attribute it toggles
        # and the value it sets.
        toggles = {
            'hlsearch': ('highlight_search', True),
            'hls': ('highlight_search', True),
            'nohlsearch': ('highlight_search', False),
            'nohls': ('highlight_search', False),
            'nu': ('show_line_numbers', True),
            'number': ('show_line_numbers', True),
            'nonu': ('show_line_numbers', False),
            'nonumber': ('show_line_numbers', False),
            'ruler': ('show_ruler', True),
            'ru': ('show_ruler', True),
            'noruler': ('show_ruler', False),
            'noru': ('show_ruler', False),
            'relativenumber': ('relative_number', True),
            'rnu': ('relative_number', True),
            'norelativenumber': ('relative_number', False),
            'nornu': ('relative_number', False),
            'cursorline': ('cursorline', True),
            'cul': ('cursorline', True),
            'nocursorline': ('cursorline', False),
            'nocul': ('cursorline', False),
            'cursorcolumn': ('cursorcolumn', True),
            'cuc': ('cursorcolumn', True),
            'nocursorcolumn': ('cursorcolumn', False),
            'nocuc': ('cursorcolumn', False),
        }
        if set_option in toggles:
            attr, flag = toggles[set_option]
            setattr(e, attr, flag)
        elif set_option in ('colorcolumn', 'cc'):
            value = variables.get('set_value', '')
            if value:
                e.colorcolumn = [int(v) for v in value.split(',') if v.isdigit()]
|
def save_json(filename: str, config: Union[List, Dict]):
    """Save JSON data to a file.

    Returns True on success.
    """
    try:
        serialized = json.dumps(config, sort_keys=True, indent=4)
    except TypeError as error:
        # config contained something json cannot represent
        _LOGGER.exception('Failed to serialize to JSON: %s', filename)
        raise PytradfriError(error)
    try:
        with open(filename, 'w', encoding='utf-8') as fdesc:
            fdesc.write(serialized)
    except OSError as error:
        _LOGGER.exception('Saving JSON file failed: %s', filename)
        raise PytradfriError(error)
    return True
|
def add_triple(self, subject, predicate, object):
    """Triple of curied or full iris to add to graph.

    Subject should be an interlex
    """
    def filter_ontid(ontid):
        # Full iris pass through untouched; curied ids get the ilx/tmp prefix.
        if ontid.startswith('http://'):
            return ontid
        if ontid.prefix == 'ILXTEMP':
            return 'tmp_' + ontid.suffix
        return 'ilx_' + ontid.suffix

    # this split between annotations and relationships is severely annoying
    # because you have to know before hand which one it is (sigh)
    s = OntId(subject)
    p = OntId(predicate)
    o = self._get_type(object)
    if type(o) == str:
        func = self.ilx_cli.add_annotation
    elif type(o) == OntId:
        func = self.ilx_cli.add_relationship
        o = filter_ontid(o)
    else:
        raise TypeError(f'what are you giving me?! {object!r}')
    return func(filter_ontid(s), filter_ontid(p), o)
|
def copy(self):
    """Return a copy of the polynomial.

    The coefficient array ``A`` is copied; ``dim``, ``shape`` and
    ``dtype`` are passed through unchanged.
    """
    return Poly(self.A.copy(), self.dim, self.shape, self.dtype)
|
def convert_rgb_to_symmetric_real(x):
    """Conversion of pixel values to real numbers."""
    with tf.name_scope("rgb_to_real", values=[x]):
        x = to_float(x)
        # Map each intensity in [0, 255] onto the symmetric range [-1, 1].
        x = x / 127.5 - 1
    return x
|
def update(self, webhook_method=values.unset, webhook_filters=values.unset, pre_webhook_url=values.unset, post_webhook_url=values.unset, pre_webhook_retry_count=values.unset, post_webhook_retry_count=values.unset, target=values.unset):
    """Update the WebhookInstance.

    :param unicode webhook_method: The HTTP method to be used when sending a webhook request.
    :param unicode webhook_filters: The list of webhook event triggers that are enabled for this Service.
    :param unicode pre_webhook_url: The absolute url the pre-event webhook request should be sent to.
    :param unicode post_webhook_url: The absolute url the post-event webhook request should be sent to.
    :param unicode pre_webhook_retry_count: The number of retries in case of pre-event webhook request failures.
    :param unicode post_webhook_retry_count: The number of retries in case of post-event webhook request failures.
    :param WebhookInstance.Target target: The routing target of the webhook.

    :returns: Updated WebhookInstance
    :rtype: twilio.rest.messaging.v1.webhook.WebhookInstance
    """
    data = values.of({
        'WebhookMethod': webhook_method,
        'WebhookFilters': serialize.map(webhook_filters, lambda e: e),
        'PreWebhookUrl': pre_webhook_url,
        'PostWebhookUrl': post_webhook_url,
        'PreWebhookRetryCount': pre_webhook_retry_count,
        'PostWebhookRetryCount': post_webhook_retry_count,
        'Target': target,
    })
    payload = self._version.update('POST', self._uri, data=data)
    return WebhookInstance(self._version, payload)
|
def compare_lists(old=None, new=None):
    '''Compare before and after results from various salt functions, returning a
    dict describing the changes that were made.

    :param old: list of items before the change (None is treated as empty)
    :param new: list of items after the change (None is treated as empty)
    :return: dict with at most the keys 'new' (an item present only in
        ``new``) and 'old' (an item present only in ``old``). NOTE: when
        several items differ, only the last one scanned is kept for each
        key (historical behavior, preserved for callers).
    '''
    # Guard against the None defaults -- iterating None raises TypeError.
    old = old or []
    new = new or []
    ret = dict()
    for item in new:
        if item not in old:
            ret['new'] = item
    for item in old:
        if item not in new:
            ret['old'] = item
    return ret
|
def _pcap_check(cls):
    """Performs checks / restart pcap adapter.

    No-op unless winpcapy is in use. When the pcap service is stopped,
    attempts to start it (prompting first in interactive mode) and warns
    if it could not be started.
    """
    if not conf.use_winpcapy:
        # Winpcap/Npcap isn't installed
        return
    _detect = pcap_service_status()

    def _ask_user():
        # Ask the user for confirmation; only meaningful when interactive.
        if not conf.interactive:
            return False
        msg = "Do you want to start it ? (yes/no) [y]: "
        try:
            # Better IPython compatibility
            import IPython
            return IPython.utils.io.ask_yes_no(msg, default='y')
        except (NameError, ImportError):
            # Plain stdin prompt loop; empty answer defaults to yes.
            while True:
                _confir = input(msg)
                _confir = _confir.lower().strip()
                if _confir in ["yes", "y", ""]:
                    return True
                elif _confir in ["no", "n"]:
                    return False
        return False
    _error_msg = ("No match between your pcap and windows "
                  "network interfaces found. ")
    if _detect:
        # No action needed
        return
    else:
        warning("Scapy has detected that your pcap service is not running !")
        # NOTE(review): this starts the service either after user consent OR
        # unconditionally when non-interactive -- confirm that is intended.
        if not conf.interactive or _ask_user():
            succeed = pcap_service_start(askadmin=conf.interactive)
            if succeed:
                log_loading.info("Pcap service started !")
                return
            _error_msg = "Could not start the pcap service ! "
    # Falls through here whenever the service is still not running.
    warning(_error_msg + "You probably won't be able to send packets. "
            "Deactivating unneeded interfaces and restarting "
            "Scapy might help. Check your winpcap/npcap installation "
            "and access rights.")
|
def parse(self):  # pylint: disable=too-many-branches
    """Check if some extra configuration files are existing in an `alignak.d`
    sub-directory near the found configuration file.

    Parse the Alignak configuration file(s).

    Exits the script if some errors are encountered, unless ``self.embedded``
    is set, in which case a ValueError is raised / False is returned instead.

    :return: True / False
    """
    # Search for ini files in an alignak.d sub-directory
    sub_directory = 'alignak.d'
    dir_name = os.path.dirname(self.configuration_file)
    dir_name = os.path.join(dir_name, sub_directory)
    self.cfg_files = [self.configuration_file]
    if os.path.exists(dir_name):
        for root, _, walk_files in os.walk(dir_name, followlinks=True):
            for found_file in walk_files:
                if not re.search(r"\.ini$", found_file):
                    continue
                self.cfg_files.append(os.path.join(root, found_file))
    print("Loading configuration files: %s " % self.cfg_files)
    # Read and parse the found configuration files
    self.config = configparser.ConfigParser()
    try:
        self.config.read(self.cfg_files)
        # NOTE(review): relies on the private _sections attribute to detect
        # an empty/badly-formatted configuration -- fragile across
        # configparser versions; confirm before upgrading Python.
        if self.config._sections == {}:
            print("* bad formatted configuration file: %s " % self.configuration_file)
            if self.embedded:
                raise ValueError
            sys.exit(2)
        for section in self.config.sections():
            if self.verbose:
                print("- section: %s" % section)
            for (key, value) in self.config.items(section):
                inner_property = "%s.%s" % (section, key)
                # Set object property
                setattr(self, inner_property, value)
                # Set environment variable
                os.environ[inner_property] = value
                if self.verbose:
                    print(" %s = %s" % (inner_property, value))
                if self.export:
                    # Allowed shell variables may only contain: [a-zA-Z0-9_]
                    inner_property = re.sub('[^0-9a-zA-Z]+', '_', inner_property)
                    inner_property = inner_property.upper()
                    print("export %s=%s" % (inner_property, cmd_quote(value)))
    except configparser.ParsingError as exp:
        print("* parsing error in config file : %s\n%s" % (self.configuration_file, exp.message))
        if self.embedded:
            return False
        sys.exit(3)
    except configparser.InterpolationMissingOptionError as exp:
        print("* incorrect or missing variable: %s" % str(exp))
        if self.embedded:
            return False
        sys.exit(3)
    if self.verbose:
        print("Configuration file parsed correctly")
    return True
|
def tailor(pattern_or_root, dimensions=None, distributed_dim='time', read_only=False):
    """Return a TileManager wrapping the root descriptor, tailoring all the
    dimensions to a specified window.

    :param pattern_or_root: an NCObject descriptor, or a filename string
        used to open one
    :param dimensions: a dictionary configuring the dimension limits
    :param distributed_dim: name of the dimension to distribute over
    :param read_only: whether the underlying data is opened read-only
    """
    return TileManager(pattern_or_root,
                       dimensions=dimensions,
                       distributed_dim=distributed_dim,
                       read_only=read_only)
|
def login(self, year, firstname, lastname, passwd, with_year=True):
    """Authenticate an user."""
    if with_year and not self.set_year(year):
        return False
    payload = {
        'prenom': firstname.upper(),
        'nom': lastname.upper(),
        'pwd': passwd,
    }
    soup = self.post_soup(URLS['login'], data=payload)
    # The site renders errors in red; no red text means the login succeeded.
    return not soup.select('font[color=red]')
|
def plot(dts, title=None, points=None, show=True):
    """Plot a distributed timeseries.

    Args:
        dts (DistTimeseries)
        title (str, optional)
        points (int, optional): Limit the number of time points plotted.
            If specified, will downsample to use this total number of time
            points, and only fetch back the necessary points to the client
            for plotting.

    Returns:
        fig
    """
    if points is not None and len(dts.tspan) > points:
        # Downsample (TODO: use interpolation) before gathering so only the
        # selected points travel back to the client.
        indices = np.linspace(0, len(dts.tspan) - 1, points).astype(np.int64)
        dts = dts[indices, ...]
    ts = distob.gather(dts)
    return ts.plot(title, show)
|
async def add_unit(self, count=1, to=None):
    """Add one or more units to this application.

    :param int count: Number of units to add
    :param str to: Placement directive, e.g.::
        '23' - machine 23
        'lxc:7' - new lxc container on machine 7
        '24/lxc/3' - lxc container 3 or machine 24
        If None, a new machine is provisioned.
    """
    app_facade = client.ApplicationFacade.from_connection(self.connection)
    log.debug('Adding %s unit%s to %s', count, '' if count == 1 else 's', self.name)
    placement = parse_placement(to) if to else None
    result = await app_facade.AddUnits(
        application=self.name,
        placement=placement,
        num_units=count,
    )
    # Wait for every freshly-created unit to appear in the model.
    waiters = [
        asyncio.ensure_future(self.model._wait_for_new('unit', unit_id))
        for unit_id in result.units
    ]
    return await asyncio.gather(*waiters)
|
def fuzzy(cls, field, value, boost=None, min_similarity=None, prefix_length=None):
    '''http://www.elasticsearch.org/guide/reference/query-dsl/fuzzy-query.html

    A fuzzy based query that uses similarity based on Levenshtein (edit
    distance) algorithm.
    '''
    instance = cls(fuzzy={field: {'value': value}})
    optional = {
        'boost': boost,
        'min_similarity': min_similarity,
        'prefix_length': prefix_length,
    }
    # Only set the options the caller actually supplied.
    for option, option_value in optional.items():
        if option_value is not None:
            instance['fuzzy'][field][option] = option_value
    return instance
|
def do_groupby(environment, value, attribute):
    """Group a sequence of objects by a common attribute.

    If you for example have a list of dicts or objects that represent persons
    with `gender`, `first_name` and `last_name` attributes and you want to
    group all users by genders you can do something like the following
    snippet:

    .. sourcecode:: html+jinja

        <ul>
        {% for group in persons|groupby('gender') %}
            <li>{{ group.grouper }}<ul>
            {% for person in group.list %}
                <li>{{ person.first_name }} {{ person.last_name }}</li>
            {% endfor %}</ul></li>
        {% endfor %}
        </ul>

    Additionally it's possible to use tuple unpacking for the grouper and
    list:

    .. sourcecode:: html+jinja

        <ul>
        {% for grouper, list in persons|groupby('gender') %}
        {% endfor %}
        </ul>

    As you can see the item we're grouping by is stored in the `grouper`
    attribute and the `list` contains all the objects that have this grouper
    in common.
    """
    def key(item):
        return environment.getitem(item, attribute)

    # groupby requires its input pre-sorted by the same key.
    grouped = groupby(sorted(value, key=key), key)
    return sorted(map(_GroupTuple, grouped))
|
def _parse_row ( row , sep = ',' ) :
    '''Split ``row`` on ``sep`` and drop empty or whitespace-only fields.

    Retained fields keep their original surrounding whitespace.
    '''
    return [ field for field in row . split ( sep ) if field . strip ( ) ]
|
def get_children_metadata ( self ) :
    """Gets the metadata for children.

    return: (osid.Metadata) - metadata for the children
    *compliance: mandatory -- This method must be implemented.*
    """
    # Copy the template so the stored _mdata entry is never mutated.
    md = dict ( self . _mdata [ 'children' ] )
    md [ 'existing_children_values' ] = self . _my_map [ 'childIds' ]
    return Metadata ( ** md )
|
def hudson_fst ( ac1 , ac2 , fill = np . nan ) :
    """Calculate the per-variant numerator and denominator for Fst
    estimation using the method of Hudson (1992) elaborated by
    Bhatia et al. (2013).

    Parameters
    ----------
    ac1 : array_like, int, shape (n_variants, n_alleles)
        Allele counts array from the first population.
    ac2 : array_like, int, shape (n_variants, n_alleles)
        Allele counts array from the second population.
    fill : float
        Use this value where there are no pairs to compare (e.g.,
        all allele calls are missing).

    Returns
    -------
    num : ndarray, float, shape (n_variants,)
        Divergence between the two populations minus average of
        diversity within each population.
    den : ndarray, float, shape (n_variants,)
        Divergence between the two populations.

    Fst per variant is ``num / den``; an overall estimate is
    ``np.sum(num) / np.sum(den)``.
    """
    # flake8: noqa
    # Validate shapes and align the two allele-count arrays.
    ac1 = asarray_ndim ( ac1 , 2 )
    ac2 = asarray_ndim ( ac2 , 2 )
    check_dim0_aligned ( ac1 , ac2 )
    ac1 , ac2 = ensure_dim1_aligned ( ac1 , ac2 )
    # Total allele calls per variant, computed once and reused.
    an1 = np . sum ( ac1 , axis = 1 )
    an2 = np . sum ( ac2 , axis = 1 )
    # Average within-population diversity (heterozygosity).
    within = ( mean_pairwise_difference ( ac1 , an1 , fill = fill )
               + mean_pairwise_difference ( ac2 , an2 , fill = fill ) ) / 2
    # Between-population divergence.
    between = mean_pairwise_difference_between ( ac1 , ac2 , an1 , an2 , fill = fill )
    # Hudson Fst numerator and denominator.
    return between - within , between
|
def _tweak_ns ( tag , ** options ) :
    """Rewrite an ElementTree ``{uri}tag`` name using a namespace prefix map.

    :param tag: XML tag element
    :param options: may contain an 'nspaces' keyword option providing a
        namespace dict, {uri: prefix}

    >>> _tweak_ns("a", nspaces={})
    'a'
    >>> _tweak_ns("{http://example.com/ns/val/}a",
    ...           nspaces={"http://example.com/ns/val/": "val"})
    'val:a'
    """
    nspaces = options . get ( "nspaces" )
    if nspaces is None :
        return tag
    matched = _ET_NS_RE . match ( tag )
    if not matched :
        return tag
    uri , local = matched . groups ( )
    prefix = nspaces . get ( uri , False )
    if prefix :
        return "%s:%s" % ( prefix , local )
    # Known-namespace tag with no prefix mapping: strip the URI part.
    return local
|
def plot_moc ( moc , order = None , antialias = 0 , filename = None , projection = 'cart' , color = 'blue' , title = '' , coord_sys = 'C' , graticule = True , ** kwargs ) :
    """Plot a MOC using Healpy.

    The MOC is rendered at ``order`` (defaulting to the MOC's current
    order) and flattened at ``order + antialias`` so that up to
    ``4 ** antialias`` intermediate color levels appear.

    :param order: HEALPix order at which to generate the plot.
    :param antialias: number of additional HEALPix orders to use for
        intermediate color levels.
    :param filename: file in which to save the plot; if None the plot is
        shown with ``plt.show()``.
    :param projection: map projection; may be shortened to 4 characters:
        'cart[esian]', 'moll[weide]' or 'gnom[onic]'.
    :param color: color scheme: 'blue', 'green', 'red' or 'black'.
    :param title: title of the plot.
    :param coord_sys: Healpy coordinate system code: 'C' (celestial),
        'G' (galactic) or 'E' (ecliptic).
    :param graticule: whether or not to draw a graticule.
    :param \\*\\*kwargs: passed to the selected Healpy plotting function.
    """
    settings = { 'xsize' : 3200 , 'cbar' : False , 'notext' : True }
    if order is None :
        order = moc . order
    # Select the Healpy plotting function by 4-character prefix.
    projections = {
        'cart' : healpy . visufunc . cartview ,
        'moll' : healpy . visufunc . mollview ,
        'gnom' : healpy . visufunc . gnomview ,
    }
    try :
        plotter = projections [ projection [ : 4 ] ]
    except KeyError :
        raise ValueError ( 'Unknown projection: {0}' . format ( projection ) )
    # White-to-shade color maps keyed by scheme name.
    shades = {
        'blue' : '#0000AA' ,
        'green' : '#008800' ,
        'red' : '#FF0000' ,
        'black' : '#000000' ,
    }
    try :
        end_color = shades [ color ]
    except KeyError :
        raise ValueError ( 'Unknown color: {0}' . format ( color ) )
    settings [ 'cmap' ] = LinearSegmentedColormap . from_list ( 'white-' + color , [ '#FFFFFF' , end_color ] )
    if coord_sys not in ( 'C' , 'G' , 'E' ) :
        raise ValueError ( 'Unknown coordinate system: {0}' . format ( coord_sys ) )
    if coord_sys != 'C' :
        # MOC cells are celestial; rotate into the requested frame.
        settings [ 'coord' ] = ( 'C' , coord_sys )
    # Any other arguments are passed to the Healpy plotter directly.
    settings . update ( kwargs )
    # Build a HEALPix array: 0 outside the MOC, up to 1 inside, with
    # fractional values from the higher-order flattening.
    hp_map = np . zeros ( 12 * 4 ** order )
    shift = 2 * antialias
    for cell in moc . flattened ( order + antialias ) :
        hp_map [ cell >> shift ] += 1.0
    plotter ( hp_map , nest = True , title = title , ** settings )
    if graticule :
        healpy . visufunc . graticule ( )
    if filename is not None :
        plt . savefig ( filename )
    else :
        plt . show ( )
|
def success ( self ) :
    """Checks for the presence of errors in the response.

    :rtype: bool
    :returns: ``True`` if PayPal says our query was successful
        (including success-with-warning), ``False`` otherwise.
    """
    acknowledgement = self . ack . upper ( )
    return acknowledgement in ( self . config . ACK_SUCCESS ,
                               self . config . ACK_SUCCESS_WITH_WARNING )
|
def create_data_figs ( self ) :
    """Generate the data and figs files for the report.

    :return: None
    """
    logger . info ( "Generating the report data and figs from %s to %s" , self . start , self . end )
    # NOTE: self.sections() is intentionally called per lookup, matching
    # the original behavior (it may rebuild the mapping each time).
    for name in self . sections ( ) :
        logger . info ( "Generating %s" , name )
        self . sections ( ) [ name ] ( )
    logger . info ( "Data and figs done" )
|
def is_redundant_union_item ( first , other ) :  # type: (AbstractType, AbstractType) -> bool
    """If union has both items, is the first one redundant?

    For example, if first is 'str' and the other is 'Text', return True.
    If items are equal, return False.
    """
    if not ( isinstance ( first , ClassType ) and isinstance ( other , ClassType ) ) :
        return False
    # Pairs where the first name is subsumed by the second.
    if ( first . name , other . name ) in ( ( 'str' , 'Text' ) ,
                                        ( 'bool' , 'int' ) ,
                                        ( 'int' , 'float' ) ) :
        return True
    if first . name in ( 'List' , 'Dict' , 'Set' ) and other . name == first . name :
        # Bare generic is redundant next to a parameterized one.
        if not first . args and other . args :
            return True
        if first . args and len ( first . args ) == len ( other . args ) :
            # Same arity: redundant if every argument matches or the
            # other side is Any.
            return all ( a == b or b == AnyType ( )
                         for a , b in zip ( first . args , other . args ) )
    return False
|
def is_dragon ( host , timeout = 1 ) :
    """Check if host is a dragon.

    Simple heuristic: fetch the index page and look for known title
    strings. Works for DragonMint or Innosilicon branded miners.
    Network failures are treated as "not a dragon".
    """
    try :
        response = requests . get ( 'http://{}/' . format ( host ) , timeout = timeout )
    except requests . exceptions . RequestException :
        return False
    if response . status_code != 200 :
        return False
    known_titles = ( '<title>DragonMint</title>' , '<title>AsicMiner</title>' )
    return any ( marker in response . text for marker in known_titles )
|
def find_commons ( lists ) :
    """Finds common values.

    :param lists: List of lists
    :return: List of values from the first inner list that appear in
        every other inner list.
    """
    first , rest = lists [ 0 ] , lists [ 1 : ]
    common = [ ]
    for candidate in first :
        if is_in_all ( candidate , rest ) :
            common . append ( candidate )
    return common
|
def send_request ( self , * args , ** kwargs ) :
    """Wrapper for session.request.

    Handles a connection reset (even from pyopenssl) by closing the
    session and retrying the request once.
    """
    try :
        return self . session . request ( * args , ** kwargs )
    except ConnectionError :
        pass
    # Connection was reset: discard the broken session state and retry.
    self . session . close ( )
    return self . session . request ( * args , ** kwargs )
|
def _calculate_fnr_fdr ( group ) :
    """Calculate the false negative rate (1 - sensitivity) and false
    discovery rate (1 - precision).

    ``group`` must be a DataFrame with 'metric' and 'value' columns
    containing at least the 'tp', 'fn' and 'fp' counts.

    Returns a one-row DataFrame with 'fnr'/'fdr' percentages plus the
    'tpr'/'spc' display strings.

    Fix: the zero-guards previously tested ``tp > 0``, so a group with
    tp == 0 but fn > 0 (or fp > 0) reported 0.0 instead of the true
    rate; guard on the actual denominators instead.
    """
    # Pivot metric rows into a {metric: value} mapping.
    data = { k : d [ "value" ] for k , d in group . set_index ( "metric" ) . T . to_dict ( ) . items ( ) }
    tp , fn , fp = data [ "tp" ] , data [ "fn" ] , data [ "fp" ]
    fnr = fn / float ( tp + fn ) * 100.0 if ( tp + fn ) > 0 else 0.0
    fdr = fp / float ( tp + fp ) * 100.0 if ( tp + fp ) > 0 else 0.0
    return pd . DataFrame ( [ { "fnr" : fnr ,
                             "fdr" : fdr ,
                             "tpr" : "TP: %s FN: %s" % ( tp , fn ) ,
                             "spc" : "FP: %s" % ( fp ) } ] )
|
def _generate_examples ( self , filepath ) :
    """Yield the examples in the raw (text) form.

    Rows sharing a pairID are merged into one example whose premise and
    hypothesis map language codes to sentences.
    """
    grouped = collections . defaultdict ( list )
    with tf . io . gfile . GFile ( filepath ) as f :
        for record in csv . DictReader ( f , delimiter = '\t' , quoting = csv . QUOTE_NONE ) :
            grouped [ record [ 'pairID' ] ] . append ( record )
    for pair_rows in six . itervalues ( grouped ) :
        yield {
            'premise' : { r [ 'language' ] : r [ 'sentence1' ] for r in pair_rows } ,
            'hypothesis' : { r [ 'language' ] : r [ 'sentence2' ] for r in pair_rows } ,
            # Label is identical across rows of a pair; take the first.
            'label' : pair_rows [ 0 ] [ 'gold_label' ] ,
        }
|
def SignedVarintEncode ( value ) :
    """Encode a signed integer as a 64-bit two's-complement varint.

    Despite the historical docstring, this is NOT zigzag encoding:
    negative values are mapped to ``value + 2**64`` and then emitted as
    a standard base-128 varint (so any negative value always takes 10
    bytes). The byte-lookup tables were replaced with direct ``bytes``
    construction; output is unchanged.

    Args:
        value: signed integer in the 64-bit range.
    Returns:
        bytes: the varint encoding.
    """
    if value < 0 :
        # Two's-complement wrap into the unsigned 64-bit range.
        value += ( 1 << 64 )
    out = bytearray ( )
    while True :
        bits = value & 0x7f
        value >>= 7
        if value :
            # Continuation byte: low 7 bits with the high bit set.
            out . append ( bits | 0x80 )
        else :
            # Final byte: high bit clear.
            out . append ( bits )
            return bytes ( out )
|
def session_id ( self ) :
    """The session ID in header type format.

    Can be inserted into a connection if necessary using::

        {'Cookie': session.session_id}

    :rtype: str
    """
    if not self . session :
        return None
    cookies = self . session . cookies
    if 'JSESSIONID' not in cookies :
        return None
    return 'JSESSIONID={}' . format ( cookies [ 'JSESSIONID' ] )
|
def _get_samtools0_path ( self ) :
    """Retrieve PATH to the samtools version specific for ericscript."""
    # The bundled samtools lives two directories above the ericscript
    # install, in its bin/ directory.
    ericscript_dir = self . _get_ericscript_path ( )
    return os . path . realpath ( os . path . join ( ericscript_dir , ".." , ".." , "bin" ) )
|
def _jx_expression ( expr , lang ) :
    """WRAP A JSON EXPRESSION WITH OBJECT REPRESENTATION

    Recursively converts a raw JSON expression (literal, variable name,
    sequence, or {operator: term} mapping) into the expression-object
    model of the target language ``lang``.
    """
    if is_expression ( expr ) :
        # Already an expression object: CONVERT TO lang
        new_op = lang [ expr . id ]
        if not new_op :
            # CAN NOT BE FOUND, TRY SOME PARTIAL EVAL
            return language [ expr . id ] . partial_eval ( )
        return expr
        # return new_op(expr.args)  # THIS CAN BE DONE, BUT IT NEEDS MORE CODING, AND I WOULD EXPECT IT TO BE SLOW
    if expr is None :
        return TRUE
    elif expr in ( True , False , None ) or expr == None or isinstance ( expr , ( float , int , Decimal , Date ) ) :
        # Scalar constants become Literal nodes.
        return Literal ( expr )
    elif is_text ( expr ) :
        # Bare strings are treated as variable references.
        return Variable ( expr )
    elif is_sequence ( expr ) :
        # Sequences become tuples of converted sub-expressions.
        return lang [ TupleOp ( [ _jx_expression ( e , lang ) for e in expr ] ) ]
    # expr = wrap(expr)
    try :
        items = items_ ( expr )
        for op , term in items :
            # ONE OF THESE IS THE OPERATOR
            full_op = operators . get ( op )
            if full_op :
                class_ = lang . ops [ full_op . id ]
                if class_ :
                    return class_ . define ( expr )
                # THIS LANGUAGE DOES NOT SUPPORT THIS OPERATOR, GOTO BASE LANGUAGE AND GET THE MACRO
                class_ = language [ op . id ]
                output = class_ . define ( expr ) . partial_eval ( )
                return _jx_expression ( output , lang )
        else :
            # for-else: no key matched a known operator.
            if not items :
                return NULL
            raise Log . error ( "{{instruction|json}} is not known" , instruction = items )
    except Exception as e :
        Log . error ( "programmer error expr = {{value|quote}}" , value = expr , cause = e )
|
def exit ( self , code = None , msg = None ) :
    """Application exit method with proper exit code.

    Runs the standard sys.exit() with the exit code previously defined
    via :py:meth:`~tcex.tcex.TcEx.exit_code` or provided here.

    Args:
        code (Optional[integer]): The exit code value for the app.
        msg (Optional[string]): A message to log and add to message tc output.
    """
    if msg is not None :
        # Codes 0 and 3 are "success-ish"; log anything else as error.
        effective = code if code is not None else self . exit_code
        if effective in [ 0 , 3 ] :
            self . log . info ( msg )
        else :
            self . log . error ( msg )
        self . message_tc ( msg )
    if code is None :
        code = self . exit_code
    elif code not in [ 0 , 1 , 3 ] :
        self . log . error ( u'Invalid exit code' )
        code = 1
    if self . default_args . tc_aot_enabled :
        # push exit message for AOT execution
        self . playbook . aot_rpush ( code )
    self . log . info ( u'Exit Code: {}' . format ( code ) )
    sys . exit ( code )
|
def checksum ( source : bytes ) -> int :
    """Calculates the checksum of the input bytes.

    RFC1071: https://tools.ietf.org/html/rfc1071
    RFC792: https://tools.ietf.org/html/rfc792

    Args:
        source: The input to be calculated.
    Returns:
        Calculated checksum.
    """
    # Odd-length input is padded with one zero octet.
    data = source if len ( source ) % 2 == 0 else source + b'\x00'
    total = 0
    for offset in range ( 0 , len ( data ) , 2 ) :
        # NOTE: words are folded low-byte-first, as in the original.
        word = ( data [ offset + 1 ] << 8 ) + data [ offset ]
        total = ones_comp_sum16 ( total , word )
    return ~ total & 0xffff
|
def recursive_unicode ( obj : Any ) -> Any :
    """Walks a simple data structure, converting byte strings to unicode.

    Supports lists, tuples, and dictionaries; any other value is
    returned unchanged.
    """
    if isinstance ( obj , dict ) :
        return { recursive_unicode ( k ) : recursive_unicode ( v )
                 for k , v in obj . items ( ) }
    if isinstance ( obj , list ) :
        return [ recursive_unicode ( item ) for item in obj ]
    if isinstance ( obj , tuple ) :
        return tuple ( recursive_unicode ( item ) for item in obj )
    if isinstance ( obj , bytes ) :
        return to_unicode ( obj )
    return obj
|
def on_Exception ( self , e ) :
    """Handle a generic exception.

    In DEBUG mode the full traceback is printed; otherwise a short
    error-report notice is written to stderr.
    """
    debugging = logging . getLogger ( self . LOGGER_NAME ) . level == logging . DEBUG
    if debugging :
        import traceback
        traceback . print_exception ( * sys . exc_info ( ) )
        return
    from PyQt5 import QtCore
    sys . stderr . write ( """An unexpected error occurred.
Check that you are using the latest version of PyQt5 and send an error report to
support@riverbankcomputing.com, including the following information:
* your version of PyQt (%s)
* the UI file that caused this error
* the debug output of pyuic5 (use the -d flag when calling pyuic5)
""" % QtCore . PYQT_VERSION_STR )
|
def launchBootstraps ( ) :
    """Launch the bootstrap instances in separate subprocesses.

    Populates the module-level ``processes`` list. If the arguments
    include ``--origin``, only the first worker receives it; the
    launcher then waits only on that origin process, otherwise it waits
    on all workers.
    """
    global processes
    worker_amount , verbosity , args = getArgs ( )
    was_origin = False
    if verbosity >= 1 :
        sys . stderr . write ( "Launching {0} worker(s) using {1}.\n" . format ( worker_amount , os . environ [ 'SHELL' ] if 'SHELL' in os . environ else "an unknown shell" , ) )
        sys . stderr . flush ( )
    processes = [ ]
    for _ in range ( worker_amount ) :
        command = [ sys . executable , "-m" , BOOTSTRAP_MODULE ] + args
        if verbosity >= 3 :
            sys . stderr . write ( "Executing '{0}'...\n" . format ( command ) )
            sys . stderr . flush ( )
        processes . append ( Popen ( command ) )
        # Only have a single origin: strip --origin after the first
        # launch so subsequent workers do not receive it.
        try :
            args . remove ( "--origin" )
        except ValueError :
            pass
        else :
            was_origin = True
    if was_origin :
        # Only wait on the origin; this will return and notify the
        # launcher that the job has finished and start the cleanup phase.
        try :
            processes [ 0 ] . wait ( )
        except KeyboardInterrupt :
            pass
    else :
        for p in processes :
            p . wait ( )
|
def find_bucket_keys ( bucket_name , regex , region_name = None , aws_access_key_id = None , aws_secret_access_key = None ) :
    """Finds a list of S3 keys matching the passed regex.

    Searches the S3 bucket for keys matching the regular expression and
    returns the matches (an empty list if none are found, or None when
    an argument is not a string).

    :param regex: (str) Regular expression to use in the key search
    :param bucket_name: (str) String S3 bucket name
    :param region_name: (str) AWS region for the S3 bucket (optional)
    :param aws_access_key_id: (str) AWS Access Key ID (optional)
    :param aws_secret_access_key: (str) AWS Secret Access Key (optional)
    :return: Array of strings containing matched S3 keys
    """
    log = logging . getLogger ( mod_logger + '.find_bucket_keys' )
    if not isinstance ( regex , basestring ) :
        log . error ( 'regex argument is not a string, found: {t}' . format ( t = regex . __class__ . __name__ ) )
        return None
    if not isinstance ( bucket_name , basestring ) :
        log . error ( 'bucket_name argument is not a string, found: {t}' . format ( t = bucket_name . __class__ . __name__ ) )
        return None
    # Set up S3 resources
    s3resource = boto3 . resource ( 's3' , region_name = region_name , aws_access_key_id = aws_access_key_id , aws_secret_access_key = aws_secret_access_key )
    bucket = s3resource . Bucket ( bucket_name )
    log . info ( 'Looking up S3 keys based on regex: {r}' . format ( r = regex ) )
    matches = [ ]
    for obj in bucket . objects . all ( ) :
        log . debug ( 'Checking if regex matches key: {k}' . format ( k = obj . key ) )
        if re . search ( regex , obj . key ) :
            matches . append ( obj . key )
    log . info ( 'Found matching keys: {k}' . format ( k = matches ) )
    return matches
|
def _transpose ( cls , char ) :
    """Convert unicode char to something similar to it.

    Maps characters with code points 65..121 through cls.UNICODE_MAP;
    anything else (or a lookup failure) is returned unchanged.
    """
    try :
        offset = ord ( char ) - 65
        if 0 <= offset <= 56 :
            return cls . UNICODE_MAP [ offset ]
        return char
    except UnicodeDecodeError :
        # Preserved from the original: un-decodable input falls through.
        return char
|
def has_missing_break ( real_seg , pred_seg ) :
    """Detect strokes of different symbols merged into one predicted symbol.

    Parameters
    ----------
    real_seg : list of integers
        The segmentation as it should be.
    pred_seg : list of integers
        The predicted segmentation.

    Returns
    -------
    bool :
        True, if strokes of two different symbols are put in the same
        symbol.
    """
    for predicted in pred_seg :
        anchor = predicted [ 0 ]
        for actual in real_seg :
            if anchor in actual :
                # All strokes of the predicted symbol must belong to the
                # real symbol containing its first stroke.
                if any ( stroke not in actual for stroke in predicted ) :
                    return True
    return False
|
def _prepare_xml ( options = None , state = None ) :
    '''Build the NRDP checkresults XML payload from salt options.

    ``state`` truthy maps to '0' (OK), otherwise '2'. When a non-empty
    'service' option is present a service check result is emitted,
    otherwise a host check result.
    '''
    _state = '0' if state else '2'
    xml = "<?xml version='1.0'?>\n<checkresults>\n"
    checktype = six . text_type ( options [ 'checktype' ] )
    hostname = cgi . escape ( options [ 'hostname' ] , True )
    if 'service' in options and options [ 'service' ] != '' :
        xml += "<checkresult type='service' checktype='" + checktype + "'>"
        xml += "<hostname>" + hostname + "</hostname>"
        xml += "<servicename>" + cgi . escape ( options [ 'service' ] , True ) + "</servicename>"
    else :
        xml += "<checkresult type='host' checktype='" + checktype + "'>"
        xml += "<hostname>" + hostname + "</hostname>"
    xml += "<state>" + _state + "</state>"
    if 'output' in options :
        xml += "<output>" + cgi . escape ( options [ 'output' ] , True ) + "</output>"
    xml += "</checkresult>"
    xml += "\n</checkresults>"
    return xml
|
def value ( self ) :
    """return float Cumulative Distribution Function.

    The return value represents a floating point number of the CDF of the
    largest eigenvalue of a Wishart(n, p) evaluated at chisq_val.
    """
    wishart = self . _wishart_cdf
    # Prepare variables for integration algorithm
    A = self . A
    p = self . _gammainc_a
    g = gamma ( wishart . alpha_vec )
    # Index vector for the q terms; q combines a power-of-one-half
    # factor with the (incomplete) gamma evaluated at chisq_val.
    q_ind = np . arange ( 2 * wishart . n_min - 2 )
    q_vec = 2 * wishart . alpha + q_ind + 2
    q = np . float_power ( 0.5 , q_vec ) * gamma ( q_vec ) * gammainc ( q_vec , self . _chisq_val )
    # Perform integration (i.e. calculate Pfaffian CDF); A is filled as
    # an antisymmetric matrix, with b accumulated across the inner loop.
    for i in xrange ( wishart . n_min ) :
        # TODO consider index tricks instead of iteration here
        b = 0.5 * p [ i ] * p [ i ]
        for j in xrange ( i , wishart . n_min - 1 ) :
            b -= q [ i + j ] / ( g [ i ] * g [ j + 1 ] )
            A [ j + 1 , i ] = p [ i ] * p [ j + 1 ] - 2 * b
            A [ i , j + 1 ] = - A [ j + 1 , i ]
    # Numerical failure (NaN in A) is reported as CDF 0.
    if np . any ( np . isnan ( A ) ) :
        return 0
    # CDF is the Pfaffian of A, i.e. sqrt of its determinant.
    return np . sqrt ( det ( A ) )
|
def masters_by_queue ( self , region , queue ) :
    """Get the master league for a given queue.

    :param string region: the region to execute this request on
    :param string queue: the queue to get the master players for
    :returns: LeagueListDTO
    """
    endpoint , params = LeagueApiV4Urls . master_by_queue ( region = region , queue = queue )
    return self . _raw_request ( self . masters_by_queue . __name__ , region , endpoint , params )
|
def get_repository_query_session ( self ) :
    """Gets the repository query session.

    return: (osid.repository.RepositoryQuerySession) - a
        ``RepositoryQuerySession``
    raise: OperationFailed - unable to complete request
    raise: Unimplemented - ``supports_repository_query()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_repository_query()`` is ``true``.*
    """
    # Guard: this session is only available when the provider supports it.
    if not self . supports_repository_query ( ) :
        raise errors . Unimplemented ( )
    # pylint: disable=no-member
    return sessions . RepositoryQuerySession ( runtime = self . _runtime )
|
def _set_properties ( self , flags ) :
    """Set the properties of the message flags based on a byte input.

    Unparseable input clears all flag properties to None.
    """
    raw = self . _normalize ( flags )
    if raw is None :
        self . _messageType = None
        self . _extended = None
        self . _hopsLeft = None
        self . _hopsMax = None
        return
    first = raw [ 0 ]
    # Bit layout: mmm e ll hh (type, extended, hops-left, hops-max).
    self . _messageType = ( first & 0xe0 ) >> 5
    self . _extended = ( first & MESSAGE_FLAG_EXTENDED_0X10 ) >> 4
    self . _hopsLeft = ( first & 0x0c ) >> 2
    self . _hopsMax = first & 0x03
|
def cmd_gimbal ( self , args ) :
    '''control gimbal

    Dispatches to the matching cmd_gimbal_* handler. Fix: an unknown
    subcommand previously fell through silently; it now echoes the
    usage string like an empty command does.
    '''
    usage = 'Usage: gimbal <rate|point|roi|roivel|mode|status>'
    if len ( args ) == 0 :
        print ( usage )
        return
    # Subcommand dispatch table.
    handlers = {
        'rate' : self . cmd_gimbal_rate ,
        'point' : self . cmd_gimbal_point ,
        'roi' : self . cmd_gimbal_roi ,
        'mode' : self . cmd_gimbal_mode ,
        'status' : self . cmd_gimbal_status ,
        'roivel' : self . cmd_gimbal_roi_vel ,
    }
    handler = handlers . get ( args [ 0 ] )
    if handler is None :
        print ( usage )
        return
    handler ( args [ 1 : ] )
|
def send_command_return_multilines ( self , obj , command , * arguments ) :
    """Send command and wait for multiple lines output."""
    query = obj . _build_index_command ( command , * arguments )
    # True -> sendQuery collects a multi-line response.
    return self . chassis_list [ obj . chassis ] . sendQuery ( query , True )
|
def ToJson ( self ) :
    """Convert object members to a dictionary that can be parsed as JSON.

    Returns:
        dict:
    """
    result = super ( StateTransaction , self ) . ToJson ( )
    result [ 'descriptors' ] = [ descriptor . ToJson ( ) for descriptor in self . Descriptors ]
    return result
|
def get_upsampling_weight ( in_channels , out_channels , kernel_size ) :
    """Make a 2D bilinear kernel suitable for upsampling.

    Returns a float32 torch tensor of shape
    (in_channels, out_channels, kernel_size, kernel_size) with the
    bilinear filter placed on the channel diagonal (requires
    in_channels == out_channels entries to be populated).
    """
    factor = ( kernel_size + 1 ) // 2
    # Odd kernels center on a sample; even kernels between samples.
    center = factor - 1 if kernel_size % 2 == 1 else factor - 0.5
    rows , cols = np . ogrid [ : kernel_size , : kernel_size ]
    bilinear = ( 1 - abs ( rows - center ) / factor ) * ( 1 - abs ( cols - center ) / factor )
    weight = np . zeros ( ( in_channels , out_channels , kernel_size , kernel_size ) , dtype = np . float64 )
    weight [ range ( in_channels ) , range ( out_channels ) , : , : ] = bilinear
    return torch . from_numpy ( weight ) . float ( )
|
def t_php_START_HEREDOC ( t ) :
    r'<<<[ \t]*(?P<label>[A-Za-z_][\w_]*)\n'
    # NOTE: the docstring above IS the PLY token regex -- do not treat
    # it as documentation or edit it as text.
    # Track the consumed newline for accurate line numbering.
    t . lexer . lineno += t . value . count ( "\n" )
    # Enter the heredoc lexer state and remember the terminating label.
    t . lexer . push_state ( 'heredoc' )
    t . lexer . heredoc_label = t . lexer . lexmatch . group ( 'label' )
    return t
|
def _componentSortKey ( componentAndType ) :
    """Sort SET components by tag.

    Sort regardless of the Choice value (static sort).

    Fix: the original contained an inner ``if asn1Spec.tagSet:`` branch
    that was unreachable, because the enclosing condition already
    requires ``not asn1Spec.tagSet``; the dead branch is removed with no
    behavior change.
    """
    component , asn1Spec = componentAndType
    if asn1Spec is None :
        asn1Spec = component
    if asn1Spec . typeId == univ . Choice . typeId and not asn1Spec . tagSet :
        # Untagged Choice carries no tag of its own; order it by the
        # smallest tag among its alternatives.
        return asn1Spec . componentType . minTagSet
    return asn1Spec . tagSet
|
def copy ( self , * , continuations : List [ memoryview ] = None , expected : Sequence [ Type [ 'Parseable' ] ] = None , list_expected : Sequence [ Type [ 'Parseable' ] ] = None , command_name : bytes = None , uid : bool = None , charset : str = None , tag : bytes = None , max_append_len : int = None , allow_continuations : bool = None ) -> 'Params' :
    """Copy the parameters, possibly replacing a subset.

    Any keyword left as None keeps the current value.
    """
    kwargs : Dict [ str , Any ] = { }
    replacements = (
        ( 'continuations' , continuations ) ,
        ( 'expected' , expected ) ,
        ( 'list_expected' , list_expected ) ,
        ( 'command_name' , command_name ) ,
        ( 'uid' , uid ) ,
        ( 'charset' , charset ) ,
        ( 'tag' , tag ) ,
        ( 'max_append_len' , max_append_len ) ,
        ( 'allow_continuations' , allow_continuations ) ,
    )
    for name , value in replacements :
        self . _set_if_none ( kwargs , name , value )
    return Params ( ** kwargs )
|
def apply_line_types ( network ) :
    """Calculate line electrical parameters x, r, b from standard types
    and write them back onto ``network.lines``.
    """
    typed_b = network . lines . type != ""
    # NOTE(review): zsum appears to be a PyPSA helper on Series (sum that
    # tolerates empty input) -- confirm before replacing with .sum().
    if typed_b . zsum ( ) == 0 :
        return
    missing_types = ( pd . Index ( network . lines . loc [ typed_b , 'type' ] . unique ( ) ) . difference ( network . line_types . index ) )
    assert missing_types . empty , ( "The type(s) {} do(es) not exist in network.line_types" . format ( ", " . join ( missing_types ) ) )
    # Join the per-type electrical constants onto a copy of the lines.
    joined = ( network . lines . loc [ typed_b , [ "type" , "length" , "num_parallel" ] ] . join ( network . line_types , on = 'type' ) )
    for attr in ( "r" , "x" ) :
        joined [ attr ] = joined [ attr + "_per_length" ] * joined [ "length" ] / joined [ "num_parallel" ]
    joined [ "b" ] = 2 * np . pi * 1e-9 * joined [ "f_nom" ] * joined [ "c_per_length" ] * joined [ "length" ] * joined [ "num_parallel" ]
    # Write calculated values back onto the live lines.
    for attr in ( "r" , "x" , "b" ) :
        network . lines . loc [ typed_b , attr ] = joined [ attr ]
|
def join ( self , timeout : float = None ) -> bool :
    """Joins on the thread associated with the response if it exists, or
    just returns after a no-op if no thread exists to join.

    :param timeout:
        Maximum number of seconds to block on the join before giving up
        and continuing. The default ``None`` waits forever.
    :return:
        Whether a thread existed to join upon.
    """
    try :
        worker = self . thread
        worker . join ( timeout )
    except AttributeError :
        # No thread attribute (or it is None): nothing to join.
        return False
    return True
|
def lvcreate ( lvname , vgname , size = None , extents = None , snapshot = None , pv = None , thinvolume = False , thinpool = False , force = False , ** kwargs ) :
    '''
    Create a new logical volume, with option for which physical volume to be used

    CLI Examples:

    .. code-block:: bash

        salt '*' lvm.lvcreate new_volume_name vg_name size=10G
        salt '*' lvm.lvcreate new_volume_name vg_name extents=100 pv=/dev/sdb
        salt '*' lvm.lvcreate new_snapshot vg_name snapshot=volume_name size=3G

    .. versionadded:: to_complete

    Support for thin pools and thin volumes

    CLI Examples:

    .. code-block:: bash

        salt '*' lvm.lvcreate new_thinpool_name vg_name size=20G thinpool=True
        salt '*' lvm.lvcreate new_thinvolume_name vg_name/thinpool_name size=10G thinvolume=True
    '''
    # Mutually exclusive argument checks (errors returned as strings,
    # matching salt execution-module conventions).
    if size and extents :
        return 'Error: Please specify only one of size or extents'
    if thinvolume and thinpool :
        return 'Error: Please set only one of thinvolume or thinpool to True'
    # lvcreate options that take a value...
    valid = ( 'activate' , 'chunksize' , 'contiguous' , 'discards' , 'stripes' , 'stripesize' , 'minor' , 'persistent' , 'mirrors' , 'noudevsync' , 'monitor' , 'ignoremonitoring' , 'permission' , 'poolmetadatasize' , 'readahead' , 'regionsize' , 'type' , 'virtualsize' , 'zero' )
    # ...and flag-style options that take no value.
    no_parameter = ( 'noudevsync' , 'ignoremonitoring' , 'thin' , )
    extra_arguments = [ ]
    if kwargs :
        for k , v in six . iteritems ( kwargs ) :
            if k in no_parameter :
                extra_arguments . append ( '--{0}' . format ( k ) )
            elif k in valid :
                extra_arguments . extend ( [ '--{0}' . format ( k ) , '{0}' . format ( v ) ] )
    cmd = [ salt . utils . path . which ( 'lvcreate' ) ]
    # Volume-kind selection: thin volume, thin pool, or regular LV.
    if thinvolume :
        cmd . extend ( [ '--thin' , '-n' , lvname ] )
    elif thinpool :
        cmd . extend ( [ '--thinpool' , lvname ] )
    else :
        cmd . extend ( [ '-n' , lvname ] )
    if snapshot :
        cmd . extend ( [ '-s' , '{0}/{1}' . format ( vgname , snapshot ) ] )
    else :
        cmd . append ( vgname )
    # Size selection: -V for thin virtual size, -L/-l otherwise.
    if size and thinvolume :
        cmd . extend ( [ '-V' , '{0}' . format ( size ) ] )
    elif extents and thinvolume :
        return 'Error: Thin volume size cannot be specified as extents'
    elif size :
        cmd . extend ( [ '-L' , '{0}' . format ( size ) ] )
    elif extents :
        cmd . extend ( [ '-l' , '{0}' . format ( extents ) ] )
    else :
        return 'Error: Either size or extents must be specified'
    if pv :
        cmd . append ( pv )
    if extra_arguments :
        cmd . extend ( extra_arguments )
    if force :
        cmd . append ( '--yes' )
    out = __salt__ [ 'cmd.run' ] ( cmd , python_shell = False ) . splitlines ( )
    lvdev = '/dev/{0}/{1}' . format ( vgname , lvname )
    # Return the lvdisplay data for the new volume, annotated with the
    # first line of lvcreate's own output.
    lvdata = lvdisplay ( lvdev )
    lvdata [ 'Output from lvcreate' ] = out [ 0 ] . strip ( )
    return lvdata
|
def crypto_sign_keypair ( seed = None ) :
    """Return (verifying, secret) key from a given seed, or os.urandom(32).

    Supplying a seed is discouraged (a RuntimeWarning is issued); the
    secret key is the seed concatenated with the verifying key.
    """
    if seed is None :
        seed = os . urandom ( PUBLICKEYBYTES )
    else :
        warnings . warn ( "ed25519ll should choose random seed." , RuntimeWarning )
    if len ( seed ) != 32 :
        raise ValueError ( "seed must be 32 random bytes or None." )
    verifying = djbec . publickey ( seed )
    return Keypair ( verifying , seed + verifying )
|
def define_simulation_graph ( batch_env , algo_cls , config ) :
    """Define the algorithm and environment interaction.

    Args:
        batch_env: In-graph environments object.
        algo_cls: Constructor of a batch algorithm.
        config: Configuration object for the algorithm.

    Returns:
        Object providing graph elements via attributes.
    """
    # pylint: disable=unused-variable
    # NOTE: every local name below becomes an attribute of the returned
    # AttrDict (via locals()), so none may be renamed or removed.
    step = tf . Variable ( 0 , False , dtype = tf . int32 , name = 'global_step' )
    is_training = tf . placeholder ( tf . bool , name = 'is_training' )
    should_log = tf . placeholder ( tf . bool , name = 'should_log' )
    do_report = tf . placeholder ( tf . bool , name = 'do_report' )
    force_reset = tf . placeholder ( tf . bool , name = 'force_reset' )
    algo = algo_cls ( batch_env , step , is_training , should_log , config )
    done , score , summary = tools . simulate ( batch_env , algo , should_log , force_reset )
    message = 'Graph contains {} trainable variables.'
    tf . logging . info ( message . format ( tools . count_weights ( ) ) )
    # pylint: enable=unused-variable
    return tools . AttrDict ( locals ( ) )
|
def assert_is_not_substring(substring, subject, message=None, extra=None):
    """Raises an AssertionError if substring is a substring of subject.

    Both arguments must be non-None; a None on either side also fails the
    assertion.  The failure message is built lazily by
    _assert_fail_message, so it costs nothing on the success path.
    """
    # `not in` replaces the old `subject.find(substring) == -1`: same result
    # for strings, clearer, and it also works for any container supporting
    # the membership protocol.
    assert (
        subject is not None
        and substring is not None
        and substring not in subject
    ), _assert_fail_message(message, substring, subject, "is in", extra)
|
def _connect(self):
    """Connect to PostgreSQL, either by reusing a connection from the pool
    if possible, or by creating the new connection.

    :rtype: psycopg2.extensions.connection
    :raises: pool.NoIdleConnectionsError
    """
    # Attempt to get a cached connection from the connection pool
    try:
        connection = self._pool_manager.get(self.pid, self)
        LOGGER.debug("Re-using connection for %s", self.pid)
    except pool.NoIdleConnectionsError:
        # No idle connection available: re-raise only when the pool is at
        # capacity; otherwise fall through and open a fresh connection.
        if self._pool_manager.is_full(self.pid):
            raise
        # Create a new PostgreSQL connection
        kwargs = utils.uri_to_kwargs(self._uri)
        LOGGER.debug("Creating a new connection for %s", self.pid)
        connection = self._psycopg2_connect(kwargs)
        # Register the new connection with the pool and lock it to this
        # session so it is not handed out concurrently.
        self._pool_manager.add(self.pid, connection)
        self._pool_manager.lock(self.pid, connection, self)
        # Added in because psycopg2ct connects and leaves the connection in
        # a weird state: consts.STATUS_DATESTYLE, returning from
        # Connection._setup without setting the state as const.STATUS_OK
        if utils.PYPY:
            connection.reset()
        # Register the custom data types
        self._register_unicode(connection)
        self._register_uuid(connection)
    return connection
|
def run(cls, row, reader):
    """Invoke the CSV parser on an individual row.

    The row should already be a dict from the CSV reader.  The reader is
    passed in so we can easily reference the CSV document headers & line
    number when generating errors.

    :param row: one record as produced by the csv reader (dict).
    :param reader: the csv reader; only its ``line_num`` is read here,
        presumably for error messages inside the helpers -- confirm there.
    """
    # Key fields first, then relationship fields; both helpers receive the
    # current line number for error reporting.
    cls._parse_keys(row, reader.line_num)
    cls._parse_relationships(row, reader.line_num)
|
def parseMalformedBamHeader(headerDict):
    """Parses the (probably) intended values out of the specified
    BAM header dictionary, which is incompletely parsed by pysam.
    This is caused by some tools incorrectly using spaces instead
    of tabs as a separator.
    """
    # Flatten every entry except CL back into "tag:value" text, then
    # re-split on whitespace so space-mangled tokens separate again.
    flattened = " ".join(
        "{}:{}".format(tag, value)
        for tag, value in headerDict.items()
        if tag != 'CL'
    )
    parsed = {}
    for token in flattened.split():
        tag, raw = token.split(":", 1)
        # Cast each value back to the type the original entry carried;
        # tags that only appear after re-splitting default to str.
        parsed[tag] = type(headerDict.get(tag, ""))(raw)
    # CL (command line) may legitimately contain spaces, so it is passed
    # through untouched.
    if 'CL' in headerDict:
        parsed['CL'] = headerDict['CL']
    return parsed
|
def find_one_and_update(self, filter, update, **kwargs):
    """See http://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.find_one_and_update

    Thin wrapper that enforces the Arctic storage quota check before
    delegating unchanged to the underlying pymongo collection.
    """
    # check_quota runs first -- presumably it raises when the library is
    # over quota, preventing the write; confirm in _arctic_lib.
    self._arctic_lib.check_quota()
    return self._collection.find_one_and_update(filter, update, **kwargs)
|
def find_germanet_xml_files(xml_path):
    '''Globs the XML files contained in the given directory and sorts
    them into sections for import into the MongoDB database.

    Arguments:
    - `xml_path`: the path to the directory containing the GermaNet
      XML files
    '''
    remaining = sorted(glob.glob(os.path.join(xml_path, '*.xml')))

    def claim(predicate):
        # Select every file whose lowercased basename satisfies the
        # predicate, preserving sorted order.
        return [path for path in remaining
                if predicate(os.path.basename(path).lower())]

    # the lexical files
    lex_files = claim(lambda base: re.match(r'(adj|nomen|verben)\.', base))
    remaining = sorted(set(remaining) - set(lex_files))
    if not lex_files:
        print('ERROR: cannot find lexical information files')

    # the GermaNet relations file
    gn_rels_file = claim(lambda base: base == 'gn_relations.xml')
    remaining = sorted(set(remaining) - set(gn_rels_file))
    if not gn_rels_file:
        print('ERROR: cannot find relations file gn_relations.xml')
        gn_rels_file = None
    else:
        if 1 < len(gn_rels_file):
            print('WARNING: more than one relations file gn_relations.xml, '
                  'taking first match')
        gn_rels_file = gn_rels_file[0]

    # the wiktionary paraphrase files
    wiktionary_files = claim(
        lambda base: re.match(r'wiktionaryparaphrases-', base))
    remaining = sorted(set(remaining) - set(wiktionary_files))
    if not wiktionary_files:
        print('WARNING: cannot find wiktionary paraphrase files')

    # the interlingual index file
    ili_files = claim(lambda base: base.startswith('interlingualindex'))
    remaining = sorted(set(remaining) - set(ili_files))
    if not ili_files:
        print('WARNING: cannot find interlingual index file')

    # anything left over was not recognised as a GermaNet file
    if remaining:
        print('WARNING: unrecognised xml files:', remaining)
    return lex_files, gn_rels_file, wiktionary_files, ili_files
|
def subjects(auth, label=None, project=None):
    '''Retrieve Subject tuples for subjects returned by this function.

    Example:
        >>> import yaxil
        >>> auth = yaxil.XnatAuth(url='...', username='...', password='...')
        >>> yaxil.subjects(auth, 'AB1234C')
        Subject(uri=u'/data/experiments/XNAT_S0001', label=u'AB1234C', id=u'XNAT_S0001',
                project=u'MyProject')

    :param auth: XNAT authentication
    :type auth: :mod:`yaxil.XnatAuth`
    :param label: XNAT Subject label
    :type label: str
    :param project: XNAT Subject Project
    :type project: str
    :returns: Subject objects
    :rtype: :mod:`yaxil.Subject`
    '''
    url = '{0}/data/subjects'.format(auth.url.rstrip('/'))
    logger.debug('issuing http request %s', url)
    # compile query string
    columns = ['ID', 'label', 'project']
    payload = {'columns': ','.join(columns)}
    if label:
        payload['label'] = label
    if project:
        payload['project'] = project
    # submit the request
    r = requests.get(url, params=payload,
                     auth=(auth.username, auth.password),
                     verify=CHECK_CERTIFICATE)
    # validate response
    if r.status_code != requests.codes.ok:
        raise AccessionError('response not ok ({0}) from {1}'.format(r.status_code, r.url))
    try:
        results = r.json()
        __quick_validate(results)
    except ResultSetError as e:
        # BUG FIX: BaseException.message was removed in Python 3, so the old
        # ``e.message`` raised AttributeError here; formatting the exception
        # itself (equivalent to str(e)) works on both Python 2 and 3.
        raise ResultSetError('{0} from {1}'.format(e, r.url))
    results = results['ResultSet']
    if int(results['totalRecords']) == 0:
        raise NoSubjectsError('no records returned from {0}'.format(r.url))
    # start generating consumable results for the caller
    for item in results['Result']:
        yield Subject(uri=item['URI'], id=item['ID'],
                      project=item['project'], label=item['label'])
|
def add_oct(self, oid, value, label=None):
    """Short helper to add an octet value to the MIB subtree.

    :param oid: OID to register the value under.
    :param value: the octet value stored for the OID.
    :param label: optional label, forwarded unchanged to add_oid_entry.
    """
    # Delegates with the type tag fixed to 'OCTET'.
    self.add_oid_entry(oid, 'OCTET', value, label=label)
|
def blocks(self):
    """Return an iterator over this function's blocks.
    The iterator will yield a ValueRef for each block.

    :raises ValueError: if this value is not a function.
    """
    if not self.is_function:
        raise ValueError('expected function value, got %s' % (self._kind,))
    it = ffi.lib.LLVMPY_FunctionBlocksIter(self)
    # Pass a copy of our parents, extended with this function, to the
    # iterator -- presumably so the iterator (and the ValueRefs it yields)
    # keep the owning objects reachable; confirm in _BlocksIterator.
    parents = self._parents.copy()
    parents.update(function=self)
    return _BlocksIterator(it, parents)
|
def frames(self):
    """Retrieve a new frame from the Kinect and convert it to a ColorImage
    and a DepthImage.

    Returns
    -------
    :obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`, None
        The ColorImage and DepthImage of the current frame.  The third
        element is always ``None`` -- despite the historical docstring, no
        IrImage is read here.  NOTE(review): confirm whether IR capture was
        intentionally dropped or should be restored.

    Raises
    ------
    RuntimeError
        If the Kinect stream is not running -- presumably raised by the
        underlying read helpers; confirm in _read_color_image.
    """
    color_im = self._read_color_image()
    depth_im = self._read_depth_image()
    return color_im, depth_im, None
|
def add_to_batch(self, batch):
    '''Adds paths to the given batch object.  They are all added as
    GL_TRIANGLES, so the batch will aggregate them all into a single OpenGL
    primitive.

    :param batch: the batch each stored path is added to -- presumably a
        pyglet.graphics.Batch; confirm at the call site.
    '''
    # Iterate the stored path objects directly instead of looking each one
    # up again by key (the key itself was never used).
    for svg_path in self.paths.values():
        svg_path.add_to_batch(batch)
|
def to_dash_case(string: str) -> str:
    """Convert a string to dash-delimited words.

    >>> import uqbar.strings
    >>> string = 'Tô Đặc Biệt Xe Lửa'
    >>> print(uqbar.strings.to_dash_case(string))
    to-dac-biet-xe-lua

    >>> string = 'alpha.beta.gamma'
    >>> print(uqbar.strings.to_dash_case(string))
    alpha-beta-gamma
    """
    # Strip diacritics / transliterate to plain ASCII first, then split
    # into words, lowercase each, and re-join with dashes.
    ascii_text = unidecode.unidecode(string)
    lowered_words = [word.lower() for word in delimit_words(ascii_text)]
    return "-".join(lowered_words)
|
def get_psms(self):
    """Creates iterator to write to new tsv.  Contains input tsv
    lines plus quant data for these.
    """
    # Derive the output header from the stored input header, then attach
    # the gene-annotated PSM iterator; both are consumed later when the
    # new tsv is written out.
    self.header = actions.create_header(self.oldheader)
    self.psms = actions.add_genes_to_psm_table(self.fn, self.oldheader, self.lookup)
|
def format(file_metrics, build_metrics):
    """compute output in JSON format.

    :param file_metrics: per-file metrics, emitted under the 'files' key.
    :param build_metrics: optional build metrics; emitted under 'build'
        only when truthy.
    :returns: pretty-printed JSON text terminated by a newline.
    """
    payload = {'files': file_metrics}
    if build_metrics:
        payload['build'] = build_metrics
    # sort_keys gives deterministic output for diffing / testing.
    return json.dumps(payload, sort_keys=True, indent=4) + '\n'
|
def bencode(canonical):
    '''Turns a dictionary into a bencoded str with alphabetized keys
    e.g., {'spam': 'eggs', 'cow': 'moo'} --> d3:cow3:moo4:spam4:spamse

    :param canonical: a mapping (anything ``dict()`` accepts).
    :returns: the bencoded string.
    :raises TypeError: if a value is not a str, int, list or dict.
    '''
    def encode_item(x):
        # Recursive single-dispatch encoder.  Note: bool is a subclass of
        # int, so booleans are encoded as integers (matching the original).
        if isinstance(x, str):
            return '{0}:{1}'.format(len(x), x)
        elif isinstance(x, int):
            return 'i{0}e'.format(x)
        elif isinstance(x, list):
            return 'l' + ''.join(encode_item(item) for item in x) + 'e'
        elif isinstance(x, dict):
            # Bencoded dicts require keys in sorted order.
            return 'd' + ''.join(
                encode_item(key) + encode_item(x[key]) for key in sorted(x)
            ) + 'e'
        # Previously unsupported types silently produced None and failed
        # later with a confusing error; fail fast with a clear message.
        raise TypeError(
            'cannot bencode value of type %s' % type(x).__name__)

    return encode_item(dict(canonical))
|
def _replace_interval_with_scalar(expr):
    """Good old Depth-First Search to identify the Interval and IntervalValue
    components of the expression and return a comparable scalar expression.

    Parameters
    ----------
    expr : float or expression of intervals
        For example, ``ibis.interval(days=1) + ibis.interval(hours=5)``

    Returns
    -------
    preceding : float or ir.FloatingScalar, depending upon the expr
    """
    try:
        expr_op = expr.op()
    except AttributeError:
        # Native Python scalars have no op(); they are returned unchanged
        # by the non-interval branch below.
        expr_op = None
    if not isinstance(expr, (dt.Interval, ir.IntervalValue)):
        # Literal expressions have op method but native types do not.
        if isinstance(expr_op, ops.Literal):
            return expr_op.value
        else:
            return expr
    elif isinstance(expr, dt.Interval):
        # A bare Interval dtype maps directly to its microsecond multiplier.
        try:
            microseconds = _map_interval_to_microseconds[expr.unit]
            return microseconds
        except KeyError:
            raise ValueError(
                "Expected preceding values of week(), "
                + "day(), hour(), minute(), second(), millisecond(), "
                + "microseconds(), nanoseconds(); got {}".format(expr))
    elif expr_op.args and isinstance(expr, ir.IntervalValue):
        # Composite interval expression: recurse into both operands and
        # combine them with the scalar counterpart of the interval op.
        if len(expr_op.args) > 2:
            raise com.NotImplementedError(
                "'preceding' argument cannot be parsed.")
        left_arg = _replace_interval_with_scalar(expr_op.args[0])
        right_arg = _replace_interval_with_scalar(expr_op.args[1])
        method = _map_interval_op_to_op[type(expr_op)]
        return method(left_arg, right_arg)
    # NOTE(review): an IntervalValue whose op has no args falls through all
    # branches and implicitly returns None -- confirm this is intentional.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.