signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def ernst_table_exporter(self, cycle, outfname='table_out', sheetname='Sheet 1'):
    """Write the NuGrid data (model output) of one cycle to an Excel sheet.

    This is a format requested by Ernst Zinner in June 2013 (through
    Marco). If you want all radioactive isotopes, start from the restart
    file. Empty columns are not written out and a message reports how
    many were empty. Please note that only one cycle is written out.

    Parameters
    ----------
    cycle : integer
        Number of the cycle to consider.
    outfname : string, optional
        File name to write to; '.xlsx' is appended automatically.
        The default is 'table_out'.
    sheetname : string, optional
        Name of the sheet in the Excel file. The default is 'Sheet 1'.
    """
    # xlsxwriter (https://xlsxwriter.readthedocs.org/) is required.
    # Install it with `pip install xlsxwriter` if it is missing.
    from xlsxwriter.workbook import Workbook
    # Isotope abundances for the requested cycle and the isotope names.
    all_data = np.array(self.get(cycle, 'iso_massf'))
    header_data = self.se.isotopes
    # Mass coordinate, reshaped to a 2-D row so it can be stacked below.
    mass_data = np.array(self.get(cycle, 'mass'))[np.newaxis]
    # Prepend the mass column (and its header) to the abundance table.
    header_data = np.hstack((['Mass'], header_data))
    all_data = np.hstack((mass_data.transpose(), all_data))
    # Zero out cells holding the 1.e-99 sentinel value.
    for i in range(len(all_data)):
        for j in range(len(all_data[i])):
            if all_data[i][j] == 1.e-99:
                all_data[i][j] = 0.
    # Count columns whose entries are all zero; those will be skipped.
    colzero = 0
    all_sum = all_data.sum(0)
    for i in range(len(all_sum)):
        if all_sum[i] == 0.:
            colzero += 1
    print(str(colzero) + ' columns are empty. Skipping them.')
    # Copy the non-empty columns (data and headers) into new arrays.
    all_data_fil = np.zeros((len(all_data), len(all_data[0]) - colzero))
    header_data_fil = np.zeros((len(header_data) - colzero), dtype='|S9')
    k = 0
    for j in range(len(all_data[0])):
        if all_sum[j] != 0:
            for i in range(len(all_data)):
                all_data_fil[i][k] = all_data[i][j]
            header_data_fil[k] = header_data[j]
            k += 1
    # Write headers into row 0 and data into rows 1..n of the sheet.
    excelfile = Workbook(outfname + '.xlsx')
    wsh = excelfile.add_worksheet(sheetname)
    print('If you run from a restart file, this might take a little bit. Be patient!')
    for i in range(len(all_data_fil)):
        for j in range(len(all_data_fil[i])):
            if i == 0:
                wsh.write(0, j, header_data_fil[j])
            wsh.write(i + 1, j, all_data_fil[i][j])
    excelfile.close()
    return None
|
def image_create(cmptparms, cspace):
    """Wrapper for the openjpeg library function opj_image_create."""
    # Declare the C signature before calling into the library.
    OPENJPEG.opj_image_create.argtypes = [
        ctypes.c_int,
        ctypes.POINTER(ImageComptParmType),
        ctypes.c_int,
    ]
    OPENJPEG.opj_image_create.restype = ctypes.POINTER(ImageType)
    return OPENJPEG.opj_image_create(len(cmptparms), cmptparms, cspace)
|
def draw_line(self, start, end, color):
    """Draw a line with the given color on the screen.

    :param start: Start point of the line
    :param end: End point of the line
    :param color: Color of the line
    :type start: tuple
    :type end: tuple
    :type color: tuple
    """
    def dist(p, a, b):
        # Perpendicular distance from point p to the infinite line
        # through a and b (cross-product magnitude over segment length).
        return (abs((b[0] - a[0]) * (a[1] - p[1]) - (a[0] - p[0]) * (b[1] - a[1]))
                / math.sqrt((b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2))
    # Bug fix: a zero-length segment made dist() divide by zero.
    if start == end:
        self.draw_dot(start, color)
        return
    points = []
    # Scan the bounding box and keep pixels within half a pixel of the line.
    for x in range(min(start[0], end[0]), max(start[0], end[0]) + 1):
        for y in range(min(start[1], end[1]), max(start[1], end[1]) + 1):
            if dist((x, y), start, end) < 0.5:
                points.append((x, y))
    for point in points:
        self.draw_dot(point, color)
|
def calendars(self):
    """Retrieve calendars for the current month.

    :returns: the 'Collection' list from the service response
    """
    today = datetime.today()
    # Bug fix: monthrange() returns (weekday_of_first_day, days_in_month).
    # The first element is a weekday (0-6), NOT a day of month; using it
    # as the start day was wrong and could even raise (day 0 on months
    # starting on a Monday). The first day of a month is always 1.
    _, days_in_month = monthrange(today.year, today.month)
    from_dt = datetime(today.year, today.month, 1)
    to_dt = datetime(today.year, today.month, days_in_month)
    params = dict(self.params)
    params.update({
        'lang': 'en-us',
        'usertz': get_localzone().zone,
        'startDate': from_dt.strftime('%Y-%m-%d'),
        'endDate': to_dt.strftime('%Y-%m-%d'),
    })
    req = self.session.get(self._calendars, params=params)
    self.response = req.json()
    return self.response['Collection']
|
from math import pi
def calculate_sphere_surface(radius):
    """Compute the surface area of a sphere given its radius.

    Examples:
    >>> calculate_sphere_surface(10)
    1256.6370614359173
    >>> calculate_sphere_surface(15)
    2827.4333882308138
    >>> calculate_sphere_surface(20)
    5026.548245743669

    :param radius: Input radius of the sphere
    :return: Returns the surface area of the sphere
    """
    surface_area = 4 * pi * radius ** 2
    return surface_area
|
def set_allowed_domains(self, allowed_domains):
    """Set the sequence of allowed domains, or None."""
    # Store an immutable copy, or None to clear the restriction.
    self._allowed_domains = None if allowed_domains is None else tuple(allowed_domains)
|
def get_best_match(self, options, service_name, api_version=None):
    """Select the best available JSON file for a service.

    With no ``api_version``, the newest available version wins. With an
    exact match, that version wins. Otherwise the newest version that is
    not greater than the requested one is used. User-created files (if
    present) are first in each option list and so take preference.

    :param options: mapping of version -> list of paths. See
        ``.get_available_options(...)``.
    :type options: dict
    :param service_name: The name of the desired service
    :type service_name: string
    :param api_version: (Optional) The desired API version to load
    :type api_version: string
    :returns: tuple of (full path to the best matching JSON file, version)
    :raises NoResourceJSONFound: when no (compatible) file exists
    """
    if not options:
        raise NoResourceJSONFound("No JSON files provided. Please check your configuration/install.")
    if api_version is None:
        # No preference given: hand back the newest available version.
        latest = max(options)
        return options[latest][0], latest
    if api_version in options:
        # Exact match.
        return options[api_version][0], api_version
    # Fall back to the newest version not exceeding the requested one
    # (versions compare lexicographically).
    for candidate in sorted(options, reverse=True):
        if candidate <= api_version:
            return options[candidate][0], candidate
    raise NoResourceJSONFound("No compatible JSON could be loaded for {0} ({1}).".format(service_name, api_version))
|
def google_nest_count(self, style):
    """Calculate the nesting count of Google-Docs lists from a CSS style."""
    margin = style.get('margin-left')
    if margin is None:
        return 0
    # Strip the trailing 'px' and scale by the configured indent step.
    return int(margin[:-2]) / self.google_list_indent
|
def uma_rp_get_rpt(self, ticket, claim_token=None, claim_token_format=None, pct=None, rpt=None, scope=None, state=None):
    """Obtain an RPT token for a UMA Requesting Party.

    Parameters:
        * **ticket (str, REQUIRED):** ticket
        * **claim_token (str, OPTIONAL):** claim token
        * **claim_token_format (str, OPTIONAL):** claim token format
        * **pct (str, OPTIONAL):** pct
        * **rpt (str, OPTIONAL):** rpt
        * **scope (list, OPTIONAL):** scope
        * **state (str, OPTIONAL):** state returned from the
          `uma_rp_get_claims_gathering_url` command

    Returns:
        **dict:** The response from the OP. On success this is the token
        data (access_token, token_type, pct, upgraded); a ``need_info``
        error payload is returned as-is so the caller can gather claims.

    Raises:
        **OxdServerError:** when oxd-server reports a generic internal_error
        **InvalidTicketError:** when the oxd server returns an
        "invalid_ticket" error
    """
    params = {"oxd_id": self.oxd_id, "ticket": ticket}
    # Only forward the optional parameters that were actually supplied.
    optional = (
        ("claim_token", claim_token),
        ("claim_token_format", claim_token_format),
        ("pct", pct),
        ("rpt", rpt),
        ("scope", scope),
        ("state", state),
    )
    for key, value in optional:
        if value:
            params[key] = value
    logger.debug("Sending command `uma_rp_get_rpt` with params %s", params)
    response = self.msgr.request("uma_rp_get_rpt", **params)
    logger.debug("Received response: %s", response)
    if response['status'] == 'ok':
        return response['data']
    error = response['data']['error']
    if error == 'internal_error':
        raise OxdServerError(response['data'])
    if error == 'need_info':
        return response['data']
    if error == 'invalid_ticket':
        raise InvalidTicketError(response['data'])
|
def instruction_BVS(self, opcode, ea):
    """Branch if the V (overflow) bit is set.

    That is, branch if the twos complement result was invalid. When used
    after an operation on twos complement binary values, this
    instruction will branch if there was an overflow.

    source code forms: BVS dd; LBVS DDDD
    CC bits "HNZVC": -----
    """
    if self.V != 1:
        return
    self.program_counter.set(ea)
|
def view_shot(self, shot):
    """View the given shot

    :param shot: the shot to view
    :type shot: :class:`jukeboxcore.djadapter.models.Shot`
    :returns: None
    :rtype: None
    :raises: None
    """
    log.debug('Viewing shot %s', shot.name)
    # Clear the current shot while the widgets are being repopulated.
    self.cur_shot = None
    self.pages_tabw.setCurrentIndex(3)
    # Fill the plain shot attribute widgets.
    self.shot_name_le.setText(shot.name)
    self.shot_prj_le.setText(shot.project.name)
    self.shot_seq_le.setText(shot.sequence.name)
    self.shot_start_sb.setValue(shot.startframe)
    self.shot_end_sb.setValue(shot.endframe)
    self.shot_handle_sb.setValue(shot.handlesize)
    self.shot_desc_pte.setPlainText(shot.description)
    # Build a tree of the shot's assets, grouped by asset type.
    assetsrootdata = treemodel.ListItemData(["Name", "Description"])
    assetsrootitem = treemodel.TreeItem(assetsrootdata)
    self.shot_asset_model = treemodel.TreeModel(assetsrootitem)
    self.shot_asset_treev.setModel(self.shot_asset_model)
    atypes = {}
    assets = shot.assets.all()
    for a in assets:
        atype = a.atype
        # One tree item per asset type, created lazily on first use.
        atypeitem = atypes.get(atype)
        if not atypeitem:
            atypedata = djitemdata.AtypeItemData(atype)
            atypeitem = treemodel.TreeItem(atypedata, assetsrootitem)
            atypes[atype] = atypeitem
        assetdata = djitemdata.AssetItemData(a)
        treemodel.TreeItem(assetdata, atypeitem)
    # Build a flat model of the shot's tasks.
    tasksrootdata = treemodel.ListItemData(["Name", "Short"])
    tasksrootitem = treemodel.TreeItem(tasksrootdata)
    self.shot_task_model = treemodel.TreeModel(tasksrootitem)
    self.shot_task_tablev.setModel(self.shot_task_model)
    tasks = shot.tasks.all()
    for t in tasks:
        tdata = djitemdata.TaskItemData(t)
        treemodel.TreeItem(tdata, tasksrootitem)
    # All widgets are up to date; record the shot as current.
    self.cur_shot = shot
|
def run_from_argv(self, argv):
    """Changes the option_list to use the options from the wrapped command.

    Adds schema parameter to specify which schema will be used when
    executing the wrapped command.
    """
    # Load the wrapped command object; argv[2] is its name.
    try:
        app_name = get_commands()[argv[2]]
    except KeyError:
        raise CommandError("Unknown command: %r" % argv[2])
    if isinstance(app_name, BaseCommand):
        # If the command is already loaded, use it directly.
        klass = app_name
    else:
        klass = load_command_class(app_name, argv[2])
    # Ugly, but works. Delete tenant_command from the argv, parse the
    # schema manually and forward the rest of the arguments to the
    # actual command being wrapped.
    del argv[1]
    schema_parser = argparse.ArgumentParser()
    schema_parser.add_argument("-s", "--schema", dest="schema_name", help="specify tenant schema")
    schema_namespace, args = schema_parser.parse_known_args(argv)
    # Resolve the tenant (interactively when no schema was given) and
    # activate it on the DB connection before delegating.
    tenant = self.get_tenant_from_options_or_interactive(schema_name=schema_namespace.schema_name)
    connection.set_tenant(tenant)
    klass.run_from_argv(args)
|
def _guess_package ( self , path ) :
"""Used in execute _ codegen to actually invoke the compiler with the proper arguments , and in
_ sources _ to _ be _ generated to declare what the generated files will be ."""
|
supported_prefixes = ( 'com' , 'org' , 'net' , )
package = ''
slash = path . rfind ( os . path . sep )
prefix_with_slash = max ( path . rfind ( os . path . join ( '' , prefix , '' ) ) for prefix in supported_prefixes )
if prefix_with_slash < 0 :
package = path [ : slash ]
elif prefix_with_slash >= 0 :
package = path [ prefix_with_slash : slash ]
package = package . replace ( os . path . sep , ' ' )
package = package . strip ( ) . replace ( ' ' , '.' )
return package
|
def _conflict_bail ( VC_err , version ) :
"""Setuptools was imported prior to invocation , so it is
unsafe to unload it . Bail out ."""
|
conflict_tmpl = textwrap . dedent ( """
The required version of setuptools (>={version}) is not available,
and can't be installed while this script is running. Please
install a more recent version first, using
'easy_install -U setuptools'.
(Currently using {VC_err.args[0]!r})
""" )
msg = conflict_tmpl . format ( ** locals ( ) )
sys . stderr . write ( msg )
sys . exit ( 2 )
|
def set_alias(alias, **kwargs):
    """Set a path alias.

    Arguments:
    alias -- The alias specification ("path [template]")
    entry -- The entry to alias it to
    category -- The category to alias it to
    url -- The external URL to alias it to
    """
    parts = alias.split()
    path = parts[0]
    values = dict(kwargs, path=path)
    if len(parts) > 1:
        values['template'] = parts[1]
    # Update an existing alias record in place, or create a new one.
    record = model.PathAlias.get(path=path)
    if record:
        record.set(**values)
    else:
        record = model.PathAlias(**values)
    orm.commit()
    return record
|
def iter_followers(self, first_user_id=None):
    """Iterate over the openids of all followers.

    See https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1421140840

    :return: a generator yielding each follower openid; pages are
        fetched lazily.

    Example::

        from wechatpy import WeChatClient
        client = WeChatClient('appid', 'secret')
        for openid in client.user.iter_followers():
            print(openid)
    """
    next_openid = first_user_id
    while True:
        page = self.get_followers(next_openid)
        next_openid = page["next_openid"]
        # WeChat still returns next_openid on the last page, so the
        # presence of 'data' is the reliable end-of-results signal.
        if 'data' not in page:
            return
        yield from page['data']['openid']
        if not next_openid:
            return
|
def create_many(self, records):
    """Create a list of new instances of the related model.

    :param records: instances attributes
    :type records: list
    :rtype: list
    """
    return [self.create(**attrs) for attrs in records]
|
def exec_command(self, command, bufsize=-1, check_status=True):
    """Execute a command on the SSH server while preserving underling
    agent forwarding and sudo privileges.

    https://github.com/paramiko/paramiko/blob/1.8/paramiko/client.py#L348

    :param command: the command to execute
    :type command: str
    :param bufsize: interpreted the same way as by the built-in C{file()} function in python
    :type bufsize: int
    :param check_status: if enabled, waits for the command to complete and raises an
        exception if the exit status is non-zero.
    :type check_status: bool
    :returns: the stdin, stdout, and stderr of the executing command
    :rtype: tuple(L{ChannelFile}, L{ChannelFile}, L{ChannelFile})
    :raises SSHException: if the server fails to execute the command
    """
    channel = self.transport.open_session()
    if self.forward_agent:
        # Forward the local SSH agent over this channel.
        AgentRequestHandler(channel)
    if self.sudoable:
        # sudo generally requires a pseudo-terminal.
        channel.get_pty()
    channel.exec_command(command)
    # NOTE(review): recv_exit_status() blocks until the command finishes,
    # and it runs before the output files below are opened/read.
    if check_status and channel.recv_exit_status() != 0:
        raise RuntimeError("Command execution error: {}".format(command))
    stdin = channel.makefile('wb', bufsize)
    stdout = channel.makefile('rb', bufsize)
    stderr = channel.makefile_stderr('rb', bufsize)
    return (stdin, stdout, stderr)
|
def reduce_opacity(img, opacity):
    """Return a copy of the image with its alpha channel scaled by *opacity*."""
    assert opacity >= 0 and opacity <= 1
    # Always work on a fresh RGBA image so the caller's image is untouched.
    img = img.convert('RGBA') if img.mode != 'RGBA' else img.copy()
    # Band 3 of an RGBA split is the alpha channel.
    alpha = ImageEnhance.Brightness(img.split()[3]).enhance(opacity)
    img.putalpha(alpha)
    return img
|
def quaternion_conjugate(quaternion):
    """Return conjugate of quaternion (scalar part kept, vector part negated).

    >>> q0 = random_quaternion()
    >>> q1 = quaternion_conjugate(q0)
    >>> q1[0] == q0[0] and all(q1[1:] == -q0[1:])
    True
    """
    q = np.array(quaternion, dtype=np.float64, copy=True)
    q[1:] *= -1.0
    return q
|
def get_master(self, host, port, sentinel_port, sentinel_name):
    """:param host: Redis host to send request
    :param port: Redis port to send request
    :param sentinel_port: sentinel_port optional
    :param sentinel_name: sentinel_name optional
    :return: master ip and port
    """
    if not (sentinel_port and sentinel_name):
        # No sentinel configured: the given host/port is the master.
        return host, port
    return Sentinel([(host, sentinel_port)], socket_timeout=1).discover_master(sentinel_name)
|
def get_object_id(self, datum):
    """Identifier of the role assignment.

    Role assignments have no identifier of their own, so one is composed
    from the assignee (user or group), the role and the scope, which
    guarantees uniqueness.
    """
    scope = datum.scope
    if "project" in scope:
        scope_id = scope["project"]["id"]
    elif "domain" in scope:
        scope_id = scope["domain"]["id"]
    else:
        scope_id = ""
    if hasattr(datum, "user"):
        assignee_id = datum.user["id"]
    elif hasattr(datum, "group"):
        assignee_id = datum.group["id"]
    else:
        assignee_id = ""
    return "%s%s%s" % (assignee_id, datum.role["id"], scope_id)
|
def warn(filepath, expected):
    """Emit a WrongFileTypeWarning for a file with an unexpected suffix.

    Parameters
    ----------
    filepath : path-like
        Given filepath.
    expected : string
        Expected file suffix.
    """
    path = pathlib.Path(filepath)
    warnings.warn(
        "file {0} has type {1} (expected {2})".format(path, path.suffix, expected),
        WrongFileTypeWarning,
    )
|
def defgate(self, name, matrix, parameters=None):
    """Define a new static gate.

    .. note::
        The matrix elements along each axis are ordered by bitstring. For two qubits the
        order is ``00, 01, 10, 11``, where the bits **are ordered in reverse** by the qubit
        index, i.e., for qubits 0 and 1 the bitstring ``01`` indicates that qubit 0 is in
        the state 1. See also :ref:`the related documentation section in the QVM Overview
        <basis-ordering>`.

    :param string name: The name of the gate.
    :param array-like matrix: List of lists or Numpy 2d array.
    :param list parameters: list of parameters that are used in this gate
    :return: The Program instance.
    :rtype: Program
    """
    gate_definition = DefGate(name, matrix, parameters)
    return self.inst(gate_definition)
|
def new(self):
    # type: () -> None
    '''Create a new Rock Ridge Child Link record.

    Parameters:
     None.
    Returns:
     Nothing.
    '''
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('CL record already initialized!')
    self.child_log_block_num = 0  # the real block number is filled in later
    self._initialized = True
|
async def get_alarms():
    """Get alarms and timers from GH (Google Home).

    Uses the module-level LOOP and IPADDRESS to query the device's local
    API and prints the alarms it reports.
    """
    async with aiohttp.ClientSession() as session:
        ghlocalapi = Alarms(LOOP, session, IPADDRESS)
        # Populates ghlocalapi.alarms as a side effect.
        await ghlocalapi.get_alarms()
        print("Alarms:", ghlocalapi.alarms)
|
def _access_token(self, request: Request = None, page_id: Text = ''):
    """Guess the access token for that specific request."""
    if not page_id:
        # Fall back to the page id carried by the incoming message.
        msg = request.message  # type: FacebookMessage
        page_id = msg.get_page_id()
    page = self.settings()
    if page['page_id'] == page_id:
        return page['page_token']
    raise PlatformOperationError(
        'Trying to get access token of the page "{}", which is not configured.'.format(page_id)
    )
|
def current_api_key():
    """Determines the API key for the current request.

    Returns:
        The ApiKey instance.
    """
    if app.config.get('IGNORE_AUTH'):
        # Auth disabled: act as an anonymous superuser.
        return models.ApiKey(id='anonymous_superuser', secret='', superuser=True)
    api_key = _get_api_key_ops().get()
    logging.debug('Authenticated as API key=%r', api_key.id)
    return api_key
|
def get_meter(self, site, start, end, point_type='Green_Button_Meter', var="meter", agg='MEAN', window='24h', aligned=True, return_names=True):
    """Get meter data from MDAL.

    Parameters
    ----------
    site : str
        Building name.
    start : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    point_type : str
        Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
    var : str
        Variable - "meter", "weather"...
    agg : str
        Aggregation - MEAN, SUM, RAW...
    window : str
        Size of the moving window.
    aligned : bool
    return_names : bool
        When True, UUID column names are replaced by point names.

    Returns
    -------
    (df, mapping, context)
    """
    # MDAL expects UTC timestamps.
    start_utc = self.convert_to_utc(start)
    end_utc = self.convert_to_utc(end)
    query = self.compose_MDAL_dic(
        point_type=point_type, site=site, start=start_utc, end=end_utc,
        var=var, agg=agg, window=window, aligned=aligned,
    )
    result = self.m.query(query)
    if return_names:
        result = self.replace_uuid_w_names(result)
    return result
|
def _makeini ( self , w , v ) :
"""C initializer string for a wire with a given value ."""
|
pieces = [ ]
for n in range ( self . _limbs ( w ) ) :
pieces . append ( hex ( v & ( ( 1 << 64 ) - 1 ) ) )
v >>= 64
return ',' . join ( pieces ) . join ( '{}' )
|
def save(self, *args, **kwargs):
    """call synchronizer "after_external_layer_saved" method
    for any additional operation that must be executed after save

    Pass ``after_save=False`` in kwargs to skip the synchronizer hook.
    """
    # Pop our custom flag so it does not reach Django's save().
    after_save = kwargs.pop('after_save', True)
    super(LayerExternal, self).save(*args, **kwargs)
    # call after_external_layer_saved method of synchronizer
    if after_save:
        try:
            synchronizer = self.synchronizer
        except ImproperlyConfigured:
            # No synchronizer configured for this layer: nothing to do.
            pass
        else:
            if synchronizer:
                synchronizer.after_external_layer_saved(self.config)
    # reload schema
    self._reload_schema()
|
def get_blob(self, repository_id, sha1, project=None, download=None, file_name=None, resolve_lfs=None):
    """GetBlob.

    Get a single blob.

    :param str repository_id: The name or ID of the repository.
    :param str sha1: SHA1 hash of the file. You can get the SHA1 of a file using the "Git/Items/Get Item" endpoint.
    :param str project: Project ID or project name
    :param bool download: If true, prompt for a download rather than rendering in a browser. Note: this value defaults to true if $format is zip
    :param str file_name: Provide a fileName to use for a download.
    :param bool resolve_lfs: If true, try to resolve a blob to its LFS contents, if it's an LFS pointer file. Only compatible with octet-stream Accept headers or $format types
    :rtype: :class:`<GitBlobRef> <azure.devops.v5_0.git.models.GitBlobRef>`
    """
    # Table-driven serialization: (wire name, python name, value[, type]).
    route_values = {}
    for wire_name, param_name, value in (
        ('project', 'project', project),
        ('repositoryId', 'repository_id', repository_id),
        ('sha1', 'sha1', sha1),
    ):
        if value is not None:
            route_values[wire_name] = self._serialize.url(param_name, value, 'str')
    query_parameters = {}
    for wire_name, param_name, value, kind in (
        ('download', 'download', download, 'bool'),
        ('fileName', 'file_name', file_name, 'str'),
        ('resolveLfs', 'resolve_lfs', resolve_lfs, 'bool'),
    ):
        if value is not None:
            query_parameters[wire_name] = self._serialize.query(param_name, value, kind)
    response = self._send(http_method='GET', location_id='7b28e929-2c99-405d-9c5c-6167a06e6816', version='5.0', route_values=route_values, query_parameters=query_parameters)
    return self._deserialize('GitBlobRef', response)
|
def AddArguments(cls, argument_group):
    """Adds command line arguments to an argument group.

    This function takes an argument parser or an argument group object and
    adds to it all the command line arguments this helper supports.

    Args:
        argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
            argparse group.
    """
    help_text = (
        'Disable queueing using ZeroMQ. A Multiprocessing queue will be '
        'used instead.')
    argument_group.add_argument(
        '--disable_zeromq', '--disable-zeromq', action='store_false',
        dest='use_zeromq', default=True, help=help_text)
|
def find_day_by_offset(year, month, offset):
    """Resolve an offset to a concrete day number in the given month.

    Non-negative offsets are clamped to the month length; negative
    offsets count back from the end of the month (-1 is the last day).

    :param year: date year
    :type year: int
    :param month: date month
    :type month: int
    :param offset: offset in day to compute (usually negative)
    :type offset: int
    :return: day number in the month
    :rtype: int

    >>> find_day_by_offset(2015, 7, -1)
    31
    """
    days_in_month = calendar.monthrange(year, month)[1]
    if offset < 0:
        return max(1, days_in_month + offset + 1)
    return min(offset, days_in_month)
|
def deserialize_from_http_generics(cls, body_bytes, headers):
    # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any
    """Deserialize an HTTP response from raw bytes and headers.

    Works on bytes and a header mapping so no requests/aiohttp (or other
    implementation-specific) objects are needed. The "content-type"
    header, if present, decides the deserialization format.
    """
    if 'content-type' in headers:
        content_type = headers['content-type'].split(";")[0].strip().lower()
    else:
        # The server did not declare what it sent; assume JSON, which
        # Autorest historically also uses for empty bodies.
        content_type = "application/json"
    if not body_bytes:
        return None
    return cls.deserialize_from_text(body_bytes, content_type)
|
def remove_id(self, key):
    """Remove the entry whose Id equals *key* (and its cached info)."""
    self.infos.pop(key, "")
    # Rebuild the underlying list without the matching entries.
    remaining = [entry for entry in self if entry.Id != key]
    list.__init__(self, remaining)
|
def apply_host_template(resource_root, name, cluster_name, host_ids, start_roles):
    """Apply a host template identified by name on the specified hosts and
    optionally start them.

    @param resource_root: The root Resource object.
    @param name: Host template name.
    @param cluster_name: Cluster name.
    @param host_ids: List of host ids.
    @param start_roles: Whether to start the created roles or not.
    @return: An ApiCommand object.
    @since: API v3
    """
    host_refs = [ApiHostRef(resource_root, host_id) for host_id in host_ids]
    return call(
        resource_root.post,
        APPLY_HOST_TEMPLATE_PATH % (cluster_name, name),
        ApiCommand,
        data=host_refs,
        params={"startRoles": start_roles},
        api_version=3,
    )
|
def kill(self, name):
    # type: (str) -> None
    """Kills the given component

    :param name: Name of the component to kill
    :raise ValueError: Invalid component name
    """
    if not name:
        raise ValueError("Name can't be None or empty")
    with self.__instances_lock:
        try:
            # Running instance
            stored_instance = self.__instances.pop(name)
            # Store the reference to the factory context
            factory_context = stored_instance.context.factory_context
            # Kill it
            stored_instance.kill()
            # Update the singleton state flag
            factory_context.is_singleton_active = False
        except KeyError:
            # Not running: the component may still be queued, waiting
            # for its handlers.
            try:
                # Extract the component context
                context, _ = self.__waiting_handlers.pop(name)
                # Update the singleton state flag
                context.factory_context.is_singleton_active = False
            except KeyError:
                raise ValueError("Unknown component instance '{0}'".format(name))
|
def exit_interpreter(self):
    """Ask the interpreter to terminate and restore the standard streams."""
    interp = self.interpreter
    interp.exit_flag = True
    if self.multithreaded:
        # Wake the blocked reader thread so it notices the exit flag.
        interp.stdin_write.write(to_binary_string('\n'))
    interp.restore_stds()
|
def _runMainLoop(self, rootJob):
    """Runs the main loop with the given job.

    :param toil.job.Job rootJob: The root job for the workflow.
    :rtype: Any
    """
    logProcessContext(self.config)
    # Real-time logging is only enabled when requested via the options.
    with RealtimeLogger(self._batchSystem, level=self.options.logLevel if self.options.realTimeLogging else None):
        # FIXME: common should not import from leader
        from toil.leader import Leader
        return Leader(config=self.config, batchSystem=self._batchSystem, provisioner=self._provisioner, jobStore=self._jobStore, rootJob=rootJob, jobCache=self._jobCache).run()
|
def classificationAccuracyVsNoise(sp, inputVectors, noiseLevelList):
    """Evaluate whether the SP output is classifiable, with varying amount of noise

    @param sp a spatial pooler instance (or None to classify the raw inputs)
    @param inputVectors (list) list of input SDRs
    @param noiseLevelList (list) list of noise levels
    :return: array of classification accuracy, one entry per noise level
    """
    numInputVector, inputSize = inputVectors.shape
    if sp is None:
        # No pooler: the uncorrupted inputs themselves are the targets.
        targetOutputColumns = copy.deepcopy(inputVectors)
    else:
        columnNumber = np.prod(sp.getColumnDimensions())
        # calculate target output given the uncorrupted input vectors
        targetOutputColumns = np.zeros((numInputVector, columnNumber), dtype=uintType)
        for i in range(numInputVector):
            sp.compute(inputVectors[i][:], False, targetOutputColumns[i][:])
    # outcomes[i][j] is 1 iff corrupted vector j was classified back to
    # class j at noise level i.
    outcomes = np.zeros((len(noiseLevelList), numInputVector))
    for i in range(len(noiseLevelList)):
        for j in range(numInputVector):
            # Corrupt a copy so the original input stays intact.
            corruptedInputVector = copy.deepcopy(inputVectors[j][:])
            corruptSparseVector(corruptedInputVector, noiseLevelList[i])
            if sp is None:
                outputColumns = copy.deepcopy(corruptedInputVector)
            else:
                outputColumns = np.zeros((columnNumber,), dtype=uintType)
                sp.compute(corruptedInputVector, False, outputColumns)
            predictedClassLabel = classifySPoutput(targetOutputColumns, outputColumns)
            outcomes[i][j] = predictedClassLabel == j
    # Mean over input vectors gives the accuracy per noise level.
    predictionAccuracy = np.mean(outcomes, 1)
    return predictionAccuracy
|
def version(family='ipv4'):
    '''Return version from iptables --version

    CLI Example:

    .. code-block:: bash

        salt '*' iptables.version

    IPv6:
        salt '*' iptables.version family=ipv6
    '''
    cmd = '{0} --version'.format(_iptables_cmd(family))
    output_parts = __salt__['cmd.run'](cmd).split()
    # Output looks like "iptables v1.x.y": the second token is the version.
    return output_parts[1]
|
def hide(input_image_file, img_enc, secret_message=None, secret_file=None, img_format=None, ):
    """Hide a message (string) in an image.

    The message is base64-encoded, zlib-compressed and stored in the EXIF
    ImageDescription tag of the output image.

    :param input_image_file: path of the cover image
    :param img_enc: path where the image with the hidden message is saved
    :param secret_message: message to hide (overridden by ``secret_file``)
    :param secret_file: optional path of a text file whose content is hidden
    :param img_format: output image format; defaults to the input's format
    :return: the (closed) PIL image object that was saved
    """
    from zlib import compress
    from base64 import b64encode
    if secret_file is not None:  # was `!= None`; identity test is correct here
        with open(secret_file, "r") as f:
            secret_message = f.read()
    try:
        text = compress(b64encode(bytes(secret_message, "utf-8")))
    except TypeError:
        # Python 2 str (or bytes) input: no str -> bytes conversion needed.
        # Previously a bare `except:` which hid unrelated errors.
        text = compress(b64encode(secret_message))
    img = tools.open_image(input_image_file)
    if img_format is None:
        img_format = img.format
    # Preserve any existing EXIF data, but replace the 0th IFD with one that
    # carries only the hidden payload.
    if "exif" in img.info:
        exif_dict = piexif.load(img.info["exif"])
    else:
        exif_dict = {}
    exif_dict["0th"] = {}
    exif_dict["0th"][piexif.ImageIFD.ImageDescription] = text
    exif_bytes = piexif.dump(exif_dict)
    img.save(img_enc, format=img_format, exif=exif_bytes)
    img.close()
    return img
|
def delete_course(self, courseid):
    """Erase all course data: database content, course folder and backups."""
    # Drop the course database content
    self.wipe_course(courseid)
    # Remove the whole course folder through the factory
    self.course_factory.delete_course(courseid)
    # Delete any zipped backups kept for this course
    backup_folder = os.path.join(self.backup_dir, courseid)
    if os.path.exists(os.path.dirname(backup_folder)):
        for archive in glob.glob(os.path.join(backup_folder, '*.zip')):
            os.remove(archive)
    self._logger.info("Course %s files erased.", courseid)
|
def get_locale():
    '''Get the current system locale
    CLI Example:
    .. code-block:: bash
        salt '*' locale.get_locale'''
    ret = ''
    systemd_booted = salt.utils.systemd.booted(__context__)
    # localectl on SLE12 is installed but its integration is still broken in
    # the latest SP3: the config gets rewritten by many %post installation
    # hooks in the older packages, so using it would break the config.
    # This is no longer the case in SLE15.
    on_sle12 = __grains__['os_family'] in ['Suse'] and __grains__['osmajorrelease'] in [12]
    if systemd_booted and not on_sle12:
        locale_info = _parse_dbus_locale() if dbus is not None else _localectl_status()['system_locale']
        ret = locale_info.get('LANG', '')
    else:
        family = __grains__['os_family']
        if 'Suse' in family:
            cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
        elif 'RedHat' in family:
            cmd = 'grep "^LANG=" /etc/sysconfig/i18n'
        elif 'Debian' in family:
            # this branch only applies to Debian without systemd
            cmd = 'grep "^LANG=" /etc/default/locale'
        elif 'Gentoo' in family:
            return __salt__['cmd.run']('eselect --brief locale show').strip()
        elif 'Solaris' in family:
            cmd = 'grep "^LANG=" /etc/default/init'
        else:
            # don't waste time on a failing cmd.run
            raise CommandExecutionError('Error: "{0}" is unsupported!'.format(__grains__['oscodename']))
        if cmd:
            try:
                ret = __salt__['cmd.run'](cmd).split('=')[1].replace('"', '')
            except IndexError as err:
                log.error('Error occurred while running "%s": %s', cmd, err)
    return ret
|
def citingArticles(self, uid, count=100, offset=1, editions=None, timeSpan=None, retrieveParameters=None):
    """Find citing articles for the article specified by unique identifier.

    Only one identifier may be given per request; Web of Science Core
    Collection (WOS) is the only valid database for this operation.

    :uid: A unique item identifier. It cannot be None or empty string.
    :count: Number of records to display in the result (0..100). With 0
        only the summary information is returned.
    :offset: First record in results to return. Must be greater than zero.
    :editions: List of editions to be searched (fields: collection, edition).
        If None, user permissions will be substituted.
    :timeSpan: Optional publication-date range (fields: begin, end, both
        YYYY-MM-DD). If None, the maximum span is inferred from the editions.
    :retrieveParameters: Retrieve parameters. If omitted the result of
        make_retrieveParameters(offset, count, 'RS', 'D') is used.
    """
    params = retrieveParameters or self.make_retrieveParameters(offset, count)
    return self._search.service.citingArticles(
        databaseId='WOS',
        uid=uid,
        editions=editions,
        timeSpan=timeSpan,
        queryLanguage='en',
        retrieveParameters=params,
    )
|
def get_access_information(self, code,  # pylint: disable=W0221
                           update_session=True):
    """Return the access information for an OAuth2 authorization grant.

    :param code: the code received in the request from the OAuth2 server
    :param update_session: Update the current session with the retrieved
        token(s).
    :returns: A dictionary with the key/value pairs for access_token,
        refresh_token and scope. The refresh_token value will be done when
        the OAuth2 grant is not refreshable.
    """
    access_info = super(AuthenticatedReddit, self).get_access_information(code)
    if update_session:
        # Store the freshly retrieved credentials on this session.
        self.set_access_credentials(**access_info)
    return access_info
|
def authors(self):
    """A list of scopus_api._ScopusAuthor objects, or None when absent."""
    author_nodes = self.xml.find('authors', ns)
    try:
        return [_ScopusAuthor(node) for node in author_nodes]
    except TypeError:
        # find() returned None (no <authors> element), which is not iterable.
        return None
|
def _generate_replacement ( interface_number , segment_number ) :
"""This will generate replacement string for
{ port0 } = > { port9}
{ segment0 } = > { segment9}"""
|
replacements = { }
for i in range ( 0 , 9 ) :
replacements [ "port" + str ( i ) ] = interface_number + i
replacements [ "segment" + str ( i ) ] = segment_number + i
return replacements
|
def GET_query(self, req_hook, req_args):
    '''Generic GET query method'''
    # GET request methods only require sessionTokens
    headers = {'content-type': 'application/json',
               'sessionToken': self.__session__}
    # Build the target URL, appending the stringified args when present.
    target = self.__url__ + req_hook
    if req_args is not None:
        target += str(req_args)
    # HTTP GET query method using requests module
    try:
        response = requests.get(target, headers=headers, verify=True)
    except requests.exceptions.RequestException as err:
        self.logger.error(err)
        return '500', 'Internal Error in RESTful.GET_query()'
    # return the token
    return response.status_code, response.text
|
def full_parent_name(self):
    """Retrieves the fully qualified parent command name.

    This is the base command name required to execute it. For example,
    in ``?one two three`` the parent name would be ``one two``.
    """
    names = []
    node = self.parent
    # Walk up the chain of parents, collecting their names.
    while node is not None:
        names.append(node.name)
        node = node.parent
    names.reverse()
    return ' '.join(names)
|
def get_assignments(self, gradebook_id='', simple=False, max_points=True, avg_stats=False, grading_stats=False):
    """Get assignments for a gradebook.

    Return the list of assignments for the gradebook identified by
    ``gradebook_id`` (falling back to ``self.gradebook_id``). The flags
    control which extra fields the service computes; enabling ``avg_stats``
    or ``grading_stats`` makes the response significantly slower.

    Args:
        gradebook_id (str): unique identifier for gradebook, i.e. ``2314``
        simple (bool): return just assignment names, default ``False``
        max_points (bool): include max points (a property of the grading
            scheme for the assignment rather than of the assignment
            itself), default ``True``
        avg_stats (bool): return average grade, default ``False``
        grading_stats (bool): return grading statistics, i.e. number of
            approved grades, unapproved grades, etc., default ``False``

    Raises:
        requests.RequestException: Exception connection error
        ValueError: Unable to decode response content

    Returns:
        list: list of assignment dictionaries (e.g. with keys
        ``assignmentId``, ``name``, ``shortName``, ``dueDate``,
        ``maxPointsTotal``, ``weight``, ...)
    """
    # These are parameters required for the remote API call, so
    # there aren't too many arguments
    # pylint: disable=too-many-arguments
    query = dict(
        includeMaxPoints=json.dumps(max_points),
        includeAvgStats=json.dumps(avg_stats),
        includeGradingStats=json.dumps(grading_stats),
    )
    endpoint = 'assignments/{gradebookId}'.format(
        gradebookId=gradebook_id or self.gradebook_id)
    response = self.get(endpoint, params=query)
    if simple:
        return [{'AssignmentName': item['name']} for item in response['data']]
    return response['data']
|
def lonlat_to_healpix(self, lon, lat, return_offsets=False):
    """Convert longitudes/latitudes to HEALPix indices (optionally with offsets)

    Parameters
    ----------
    lon, lat : :class:`~astropy.units.Quantity`
        The longitude and latitude values as quantities with angle units.
    return_offsets : bool
        If `True`, also return ``dx`` and ``dy``, the fractional positions
        inside each pixel (0.5 is the pixel center). If `False` (default),
        only the HEALPix pixel indices are returned.

    Returns
    -------
    healpix_index : `~numpy.ndarray`
        1-D array of HEALPix indices
    dx, dy : `~numpy.ndarray`
        1-D offset arrays in [0:1], only when ``return_offsets`` is `True`.
    """
    # Delegate to the module-level function of the same name, supplying
    # this object's resolution (nside) and ordering scheme.
    return lonlat_to_healpix(lon, lat, self.nside,
                             return_offsets=return_offsets,
                             order=self.order)
|
def restore(self):
    """Restore signal handlers to their original settings."""
    handlers = [(signal.SIGINT, self.original_sigint),
                (signal.SIGTERM, self.original_sigterm)]
    if os.name == 'nt':
        # SIGBREAK only exists on Windows.
        handlers.append((signal.SIGBREAK, self.original_sigbreak))
    for signum, handler in handlers:
        signal.signal(signum, handler)
|
def list_file(self, commit, path, recursive=False):
    """Lists the files in a directory.

    Params:
    * commit: A tuple, string, or Commit object representing the commit.
    * path: The path to the directory.
    * recursive: If True, continue listing the files for sub-directories.
    """
    request = proto.ListFileRequest(
        file=proto.File(commit=commit_from(commit), path=path))
    infos = self.stub.ListFile(request, metadata=self.metadata).file_info
    if not recursive:
        return list(infos)
    subdirs = [f for f in infos if f.file_type == proto.DIR]
    plain_files = [f for f in infos if f.file_type == proto.FILE]
    # Recurse into every sub-directory and flatten the listings onto the
    # plain files of this level.
    return sum((self.list_file(commit, d.file.path, recursive) for d in subdirs),
               plain_files)
|
def make_payload(base, method, params):
    """Build Betfair JSON-RPC payload.

    :param str base: Betfair base ("Sports" or "Account")
    :param str method: Betfair endpoint
    :param dict params: Request parameters
    """
    rpc_method = '{base}APING/v1.0/{method}'.format(base=base, method=method)
    return {
        'jsonrpc': '2.0',
        'method': rpc_method,
        'params': utils.serialize_dict(params),
        'id': 1,
    }
|
def show_bokehjs(bokehjs_action, develop=False):
    '''Print a useful report after setuptools output describing where and how
    BokehJS is installed.

    Args:
        bokehjs_action (str) : one of 'built', 'installed', or 'packaged'
            how (or if) BokehJS was installed into the python source tree
        develop (bool, optional) :
            whether the command was for "develop" mode (default: False)

    Returns:
        None
    '''
    print()
    header = "Installed Bokeh for DEVELOPMENT:" if develop else "Installed Bokeh:"
    print(header)
    if bokehjs_action in ['built', 'installed']:
        freshness = bright(yellow("NEWLY")) if bokehjs_action == 'built' else bright(yellow("PREVIOUSLY"))
        print(" - using %s built BokehJS from bokehjs/build\n" % freshness)
    else:
        print(" - using %s BokehJS, located in 'bokeh.server.static'\n" % bright(yellow("PACKAGED")))
    print()
|
def handle_error(self, error=None):
    """Trap for TCPServer errors, otherwise continue."""
    if _debug:
        TCPServerActor._debug("handle_error %r", error)
    if error is None:
        # No specific error: fall back to the base class behaviour.
        TCPServer.handle_error(self)
    else:
        # pass along to the director
        self.director.actor_error(self, error)
|
def main(argv=None):
    """Entry point

    :param argv: Script arguments (None for sys.argv)
    :return: An exit code or None
    """
    # Parse arguments
    parser = argparse.ArgumentParser(
        prog="pelix.shell.console",
        parents=[make_common_parser()],
        description="Pelix Shell Console",
    )
    args = parser.parse_args(argv)
    # Handle arguments
    init = handle_common_arguments(args)
    # Set the initial bundles
    bundles = [
        "pelix.ipopo.core",
        "pelix.shell.core",
        "pelix.shell.ipopo",
        "pelix.shell.completion.pelix",
        "pelix.shell.completion.ipopo",
        "pelix.shell.console",
    ] + list(init.bundles)
    # Use the utility method to create, run and delete the framework
    framework = pelix.create_framework(remove_duplicates(bundles), init.properties)
    framework.start()
    # Instantiate components
    init.instantiate_components(framework.get_bundle_context())
    try:
        framework.wait_for_stop()
    except KeyboardInterrupt:
        framework.stop()
|
def write(self, features=None, outfile=None, format=0, is_leaf_fn=None,
          format_root_node=False, dist_formatter=None, support_formatter=None,
          name_formatter=None):
    """Returns the newick representation of current node.

    Parameters:
        features: a list of feature names to export using the Extended
            Newick Format (e.g. features=["name", "dist"]). An empty list
            exports all available features of each node (features=[]).
        outfile: writes the output to a given file instead of returning it.
        format: defines the newick standard used to encode the tree.
        format_root_node: if True, features and branch information of the
            root node are included in the newick string. False by default
            for newick compatibility.
        is_leaf_fn: see :func:`TreeNode.traverse` for documentation.

    **Example:**
        t.get_newick(features=["species", "name"], format=1)
    """
    newick = write_newick(self, features=features, format=format,
                          is_leaf_fn=is_leaf_fn,
                          format_root_node=format_root_node,
                          dist_formatter=dist_formatter,
                          support_formatter=support_formatter,
                          name_formatter=name_formatter)
    if outfile is None:
        return newick
    with open(outfile, "w") as handle:
        handle.write(newick)
|
def wait(rh):
    """Wait for the virtual machine to go into the specified state.

    Input:
        Request Handle with the following properties:
            function    - 'POWERVM'
            subfunction - 'WAIT'
            userid      - userid of the virtual machine
            parms['desiredState'] - Desired state
            parms['maxQueries']   - Maximum number of queries to issue
            parms['maxWait']      - Maximum time to wait in seconds
            parms['poll']         - Polling interval in seconds

    Output:
        Request Handle updated with the results.
        Return code - 0: ok, non-zero: error
    """
    rh.printSysLog("Enter powerVM.wait, userid: " + rh.userid)
    desired = rh.parms['desiredState']
    # 'on'/'off' are power states; everything else is an OS-level state.
    if desired in ('off', 'on'):
        results = waitForVMState(rh, rh.userid, desired,
                                 maxQueries=rh.parms['maxQueries'],
                                 sleepSecs=rh.parms['poll'])
    else:
        results = waitForOSState(rh, rh.userid, desired,
                                 maxQueries=rh.parms['maxQueries'],
                                 sleepSecs=rh.parms['poll'])
    if results['overallRC'] == 0:
        rh.printLn("N", rh.userid + ": " + desired)
    else:
        rh.updateResults(results)
    rh.printSysLog("Exit powerVM.wait, rc: " + str(rh.results['overallRC']))
    return rh.results['overallRC']
|
def filter(self):  # noqa A001
    """Filter tag's children, keeping matching non-string nodes."""
    kept = []
    for child in self.get_contents(self.tag):
        # Skip text nodes; keep only element children that match.
        if self.is_navigable_string(child):
            continue
        if self.match(child):
            kept.append(child)
    return kept
|
def build_output_map(protomap, get_tensor_by_name):
    """Builds a map of tensors from `protomap` using `get_tensor_by_name`.

    Args:
        protomap: A proto map<string, TensorInfo>.
        get_tensor_by_name: A lambda that receives a tensor name and returns
            a Tensor instance.

    Returns:
        A map from string to Tensor or SparseTensor instances built from
        `protomap`, resolving tensors with `get_tensor_by_name()`.

    Raises:
        ValueError: if a TensorInfo proto is malformed.
    """
    def resolve(tensor_info):
        # A TensorInfo is either a plain named tensor or a COO sparse triple.
        encoding = tensor_info.WhichOneof("encoding")
        if encoding == "name":
            return get_tensor_by_name(tensor_info.name)
        if encoding == "coo_sparse":
            coo = tensor_info.coo_sparse
            return tf.SparseTensor(
                get_tensor_by_name(coo.indices_tensor_name),
                get_tensor_by_name(coo.values_tensor_name),
                get_tensor_by_name(coo.dense_shape_tensor_name))
        raise ValueError("Invalid TensorInfo.encoding: %s" % encoding)

    return {key: resolve(info) for key, info in protomap.items()}
|
def verify_request(self):
    """Verify LTI request

    Parses the launch parameters (form-encoded body for POST, query string
    otherwise), rebuilds the public request URL from forwarding headers and
    validates the OAuth signature against the configured consumers. On
    success the LTI properties are stored in the session and the session is
    marked as logged in; on failure the session is scrubbed.

    :raises: LTIException if request validation failed"""
    request = self.lti_kwargs['app'].current_request
    if request.method == 'POST':
        # Chalice expects JSON and does not natively support form data in
        # a post body. The below is copied from the parsing of query
        # strings as implemented in match_route of Chalice local.py
        parsed_url = request.raw_body.decode()
        parsed_qs = parse_qs(parsed_url, keep_blank_values=True)
        # parse_qs yields lists; keep only the first value of each key.
        params = {k: v[0] for k, v in parsed_qs.items()}
    else:
        params = request.query_params
    log.debug(params)
    log.debug('verify_request?')
    try:
        # Chalice does not have a url property, therefore building it from
        # the forwarding headers and the route path.
        protocol = request.headers.get('x-forwarded-proto', 'http')
        hostname = request.headers['host']
        path = request.context['path']
        url = urlunparse((protocol, hostname, path, "", "", ""))
        verify_request_common(self._consumers(), url, request.method,
                              request.headers, params)
        log.debug('verify_request success')
        # All good to go, store all of the LTI params into a
        # session dict for use in views
        for prop in LTI_PROPERTY_LIST:
            if params.get(prop, None):
                log.debug("params %s=%s", prop, params.get(prop, None))
                self.session[prop] = params[prop]
        # Set logged in session key
        self.session[LTI_SESSION_KEY] = True
        return True
    except LTIException:
        log.debug('verify_request failed')
        # Validation failed: remove any LTI properties from the session and
        # mark it as not logged in before re-raising for the caller.
        for prop in LTI_PROPERTY_LIST:
            if self.session.get(prop, None):
                del self.session[prop]
        self.session[LTI_SESSION_KEY] = False
        raise
|
def update_font(self):
    """Update font from Preferences"""
    plugin_font = self.get_plugin_font()
    # Propagate the configured font to every open client.
    for editor_client in self.clients:
        editor_client.set_font(plugin_font)
|
def project_activity(index, start, end):
    """Compute the metrics for the project activity section of the enriched
    git index.

    :param index: index object
    :param start: start date to get the data from
    :param end: end date to get the data upto
    :return: dictionary with a "metrics" key holding the metrics for this
        section
    """
    metrics = [Commits(index, start, end), Authors(index, start, end)]
    return {"metrics": metrics}
|
def _update_event(self, event_index, event_state, event_type, event_value,
                  proc_list, proc_desc, peak_time):
    """Update an event in the list.

    Events are stored as indexable records in ``self.events_list``; the
    positions used here appear to be: 0=begin time, 1=end time, 2=state,
    4=max value, 5=average value, 6=min value, 7=value sum, 8=value count,
    9=top process list, 10=process description, 11=sort key — inferred from
    the assignments below; confirm against the event-creation code.

    Returns True.
    """
    if event_state == "OK" or event_state == "CAREFUL":
        # Event has calmed down: reset the automatic process sort key.
        self.reset_process_sort()
        # Set the end of the event.
        endtime = time.mktime(datetime.now().timetuple())
        if endtime - self.events_list[event_index][0] > peak_time:
            # If event is > peak_time seconds, close it.
            self.events_list[event_index][1] = endtime
        else:
            # If event <= peak_time seconds, drop it entirely (too short).
            self.events_list.remove(self.events_list[event_index])
    else:
        # Ongoing event: update the item.
        self.set_process_sort(event_type)
        # State: only escalate to CRITICAL here (never downgraded).
        if event_state == "CRITICAL":
            self.events_list[event_index][2] = event_state
        # Min value
        self.events_list[event_index][6] = min(self.events_list[event_index][6], event_value)
        # Max value
        self.events_list[event_index][4] = max(self.events_list[event_index][4], event_value)
        # Average value: running sum (index 7) and count (index 8) feed the
        # mean stored at index 5.
        self.events_list[event_index][7] += event_value
        self.events_list[event_index][8] += 1
        self.events_list[event_index][5] = (self.events_list[event_index][7] /
                                            self.events_list[event_index][8])
        # TOP PROCESS LIST (only for CRITICAL ALERT)
        if event_state == "CRITICAL":
            events_sort_key = self.get_event_sort_key(event_type)
            # Sort the current process list to retrieve the TOP 3 processes.
            self.events_list[event_index][9] = sort_stats(proc_list, events_sort_key)[0:3]
            self.events_list[event_index][11] = events_sort_key
        # MONITORED PROCESSES DESC
        self.events_list[event_index][10] = proc_desc
    return True
|
def update_cache(self):
    """Reset the lal cache. This can be used to update the cache if the
    result may change due to more files being added to the filesystem,
    for example."""
    latest_cache = locations_to_cache(self.frame_src, latest=True)
    # Re-open the frame stream against the refreshed cache.
    self.stream = lalframe.FrStreamCacheOpen(latest_cache)
|
def get(self, model, **spec):
    """Get a single model instance matching the given spec.

    :param model: model
    :param spec: lookup keyword arguments identifying the instance
    :return: the matching instance
    :raises ObjectDoesNotExist: if no handle matches
    :raises MultipleObjectsReturned: if more than one handle matches
    """
    matches = self.__find_handles(model, **spec)
    if not matches:
        raise ObjectDoesNotExist()
    if len(matches) > 1:
        raise MultipleObjectsReturned()
    return self.get_instance(model, matches[0])
|
def post(self, request):
    """As per :rfc:`3.2` the token endpoint *only* supports POST requests.

    Validates transport security, grant type and client credentials, then
    dispatches to the handler registered for the requested grant type.
    OAuth errors raised by the handler are converted into error responses.
    """
    if constants.ENFORCE_SECURE and not request.is_secure():
        return self.error_response({
            'error': 'invalid_request',
            'error_description': _("A secure connection is required.")})
    if 'grant_type' not in request.POST:
        return self.error_response({
            'error': 'invalid_request',
            'error_description': _("No 'grant_type' included in the "
                                   "request.")})
    grant_type = request.POST['grant_type']
    if grant_type not in self.grant_types:
        return self.error_response({'error': 'unsupported_grant_type'})
    client = self.authenticate(request)
    if client is None:
        return self.error_response({'error': 'invalid_client'})
    handler = self.get_handler(grant_type)
    try:
        return handler(request, request.POST, client)
    except OAuthError as e:
        # was `except OAuthError, e` — Python 2-only syntax that is a
        # SyntaxError on Python 3; `as` works on both.
        return self.error_response(e.args[0])
|
def get_coordination_numbers(d):
    """Helper method to get the coordination number of all sites in the final
    structure from a run.

    Args:
        d: Run dict generated by VaspToDbTaskDrone.

    Returns:
        Coordination numbers as a list of dict of
        [{"site": site_dict, "coordination": number}, ...].
    """
    structure = Structure.from_dict(d["output"]["crystal"])
    voronoi = VoronoiNN()
    coordination = []
    for idx, site in enumerate(structure.sites):
        try:
            rounded_cn = int(round(voronoi.get_cn(structure, idx)))
            coordination.append({"site": site.as_dict(),
                                 "coordination": rounded_cn})
        except Exception:
            # Voronoi analysis can fail for some geometries; log and skip.
            logger.error("Unable to parse coordination errors")
    return coordination
|
def parse(line):
    """Parse accesslog line to map Python dictionary.

    Returned dictionary has following keys:
    - time: access time (datetime; naive)
    - utcoffset: UTC offset of access time (timedelta)
    - host: remote IP address.
    - path: HTTP request path, this will be splitted from query.
    - query: HTTP request query string removed from "?".
    - method: HTTP request method.
    - protocol: HTTP request version.
    - status: HTTP response status code. (int)
    - size: HTTP response size, if available. (int)
    - referer: Referer header. if "-" is given, that will be ignored.
    - ua: User agent. if "-" is given, that will be ignored.
    - ident: remote logname
    - user: remote user
    - trailing: Additional information if using custom log format.

    You can use "utcoffset" with `dateutil.tz.tzoffset` like followings:

    >>> from dateutil.tz import tzoffset
    >>> e = parse(line)
    >>> tz = tzoffset(None, e['utcoffset'].total_seconds())
    >>> t = e['time'].replace(tzinfo=tz)

    :param line: one line of access log combined format
    :type line: string
    :rtype: dict
    """
    match = LOG_FORMAT.match(line)
    if match is None:
        return
    access = Access._make(match.groups())
    entry = {
        'host': access.host,
        'path': access.path,
        'query': access.query,
        'method': access.method,
        'protocol': access.protocol,
        'status': int(access.status),
    }
    entry['time'] = datetime.datetime(
        int(access.year), MONTH_ABBR[access.month], int(access.day),
        int(access.hour), int(access.minute), int(access.second))
    # Parse timezone string; "+HHMM" format.
    sign = 1 if access.timezone[0] == '+' else -1
    entry['utcoffset'] = sign * datetime.timedelta(
        hours=int(access.timezone[1:3]), minutes=int(access.timezone[3:5]))
    # Optional fields: "-" means "not available" in combined log format.
    for field in ('ident', 'user', 'referer', 'ua'):
        value = getattr(access, field)
        if value != '-':
            entry[field] = value
    if access.size != '-':
        entry['size'] = int(access.size)
    if access.trailing:
        entry['trailing'] = access.trailing.strip()
    return entry
|
def _compute_projection(self, X, W):
    """Compute the LPP projection matrix.

    Parameters
    ----------
    X : array_like, (n_samples, n_features)
        The input data
    W : array_like or sparse matrix, (n_samples, n_samples)
        The precomputed adjacency matrix

    Returns
    -------
    P : ndarray, (n_features, self.n_components)
        The matrix encoding the locality preserving projection
    """
    # TODO: check W input; handle sparse case
    X = check_array(X)
    # Degree matrix and (unnormalized) graph Laplacian.
    D = np.diag(W.sum(1))
    L = D - W
    # Generalized eigenproblem X^T L X v = lambda X^T D X v; keep the
    # eigenvectors of the n_components smallest eigenvalues.
    lhs = np.dot(X.T, np.dot(L, X))
    rhs = np.dot(X.T, np.dot(D, X))
    evals, evecs = eigh_robust(lhs, rhs, eigvals=(0, self.n_components - 1))
    return evecs
|
def _clean_required_args(self, url, redirect_uri, client_type):
    """Validate and clean the command's arguments.

    Arguments:
        url (str): Client's application URL.
        redirect_uri (str): Client application's OAuth2 callback URI.
        client_type (str): Client's type, indicating whether the Client
            application is capable of maintaining the confidentiality of its
            credentials (e.g., running on a secure server) or is incapable
            of doing so (e.g., running in a browser).

    Raises:
        CommandError, if the URLs provided are invalid, or if the client
        type provided is invalid.
    """
    # Validate URLs
    validate_url = URLValidator()
    for candidate in (url, redirect_uri):
        try:
            validate_url(candidate)
        except ValidationError:
            raise CommandError("URLs provided are invalid. Please provide valid application and redirect URLs.")
    # Validate and map client type to the appropriate django-oauth2-provider constant
    mapped_type = {'confidential': CONFIDENTIAL, 'public': PUBLIC}.get(client_type.lower())
    if mapped_type is None:
        raise CommandError("Client type provided is invalid. Please use one of 'confidential' or 'public'.")
    self.fields = {  # pylint: disable=attribute-defined-outside-init
        'url': url,
        'redirect_uri': redirect_uri,
        'client_type': mapped_type,
    }
|
def initQApplication():
    """Initializes the QtWidgets.QApplication instance. Creates one if it doesn't exist.

    Sets Argos specific attributes, such as the OrganizationName, so that the
    application persistent settings are read/written to the correct settings
    file/winreg. It is therefore important to call this function at startup.
    The ArgosApplication constructor does this.

    Returns the application.
    """
    if 'darwin' in sys.platform:
        # PyQtGraph recommends the raster graphics system for OS-X
        # (alternatives: native or opengl).
        graphicsSystem = "raster"
        os.environ.setdefault('QT_GRAPHICSSYSTEM', graphicsSystem)
        logger.info("Setting QT_GRAPHICSSYSTEM to: {}".format(graphicsSystem))
    app = QtWidgets.QApplication(sys.argv)
    initArgosApplicationSettings(app)
    return app
|
def show_label(self, text, size=None, color=None, font_desc=None):
    """Display text. Unless font_desc is provided, the system's default font is used."""
    description = pango.FontDescription(font_desc or _font_desc)
    if color:
        self.set_color(color)
    if size:
        # pango sizes are expressed in units of pango.SCALE.
        description.set_absolute_size(size * pango.SCALE)
    self.show_layout(text, description)
|
def _verify_service_agreement_signature(self, did, agreement_id, service_definition_id,
                                        consumer_address, signature, ddo=None):
    """Verify service agreement signature.

    Verify that the given signature is truly signed by the `consumer_address`
    and represents this did's service agreement.

    :param did: DID, str
    :param agreement_id: id of the agreement, hex str
    :param service_definition_id: identifier of the service inside the asset DDO, str
    :param consumer_address: ethereum account address of consumer, hex str
    :param signature: Signature, str
    :param ddo: DDO instance (resolved from `did` when not supplied)
    :return: True if signature is legitimate, False otherwise
    :raises: ValueError if service is not found in the ddo
    :raises: AssertionError if conditions keys do not match the on-chain conditions keys
    """
    if not ddo:
        # Resolve the DDO from the DID when the caller did not provide one.
        ddo = self._asset_resolver.resolve(did)
    service_agreement = ServiceAgreement.from_ddo(service_definition_id, ddo)
    # Recompute the hash the consumer is expected to have signed.
    agreement_hash = service_agreement.get_service_agreement_hash(
        agreement_id, ddo.asset_id, consumer_address,
        Web3Provider.get_web3().toChecksumAddress(ddo.proof['creator']),
        self._keeper)
    # The signing scheme prefixes the hash before signing (see
    # prepare_prefixed_hash); mirror that before recovering the signer.
    prefixed_hash = prepare_prefixed_hash(agreement_hash)
    recovered_address = Web3Provider.get_web3().eth.account.recoverHash(
        prefixed_hash, signature=signature)
    # The signature is valid iff it recovers to the consumer's address.
    is_valid = (recovered_address == consumer_address)
    if not is_valid:
        logger.warning(f'Agreement signature failed: agreement hash is {agreement_hash.hex()}')
    return is_valid
|
def bundle_visualization(self, bundle_id, channel=None):
    '''Get the bundle visualization.

    @param bundle_id The ID of the bundle.
    @param channel Optional channel name.'''
    visualization_url = self.bundle_visualization_url(bundle_id, channel=channel)
    return self._get(visualization_url).content
|
def remove_child(self, child):
    """Remove a child widget from this widget.

    Detaches `child` from the children list, clears its parent link, and
    notifies the front-end view (when one is loaded) to drop the element.

    :param child: Object inheriting :class:`BaseElement`
    """
    self.children.remove(child)
    child.parent = None
    view = self.view
    # Only dispatch to the front-end once the view has finished loading.
    if view and view.is_loaded:
        view.dispatch({'name': 'remove', 'selector': '#' + child.id})
|
def pave_community(self):
    """Usage:
    containment pave_community"""
    # Create the project config directory and seed it with the base
    # text plus empty package manifests.
    cfg = settings.project_config
    cfg.path.mkdir()
    cfg.base.write_text(self.context.base_text)
    cfg.os_packages.write_text("[]")
    cfg.lang_packages.write_text("{}")
|
def installed_packages(self):
    """Return the names of packages reported by ``pip freeze``.

    :return: list of installed package names (version specifiers stripped);
        empty when ``pip freeze`` cannot be run
    """
    packages = []
    cmdline = [sys.executable, "-mpip", "freeze"]
    try:
        output = subprocess.check_output(cmdline).decode('utf-8')
    # BUG FIX: check_output raises CalledProcessError/OSError, never
    # RuntimeError, so the original handler was dead code and real
    # failures propagated. Also the original ended with a bare
    # ``logger.debug()`` (no args), which itself raises TypeError.
    except (subprocess.CalledProcessError, OSError):
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Exception checking existing packages.")
            logger.debug("cmdline: %s", cmdline)
            ex_type, ex, tb = sys.exc_info()
            traceback.print_tb(tb)
        return packages
    for package in output.splitlines():
        # installed package names usually look like Pillow==2.8.1;
        # ignore others, like external packages that pip show
        # won't understand
        for comparator in ["==", ">=", "<=", "<", ">"]:
            if comparator in package:
                packages.append(package.partition(comparator)[0])
                # BUG FIX: without this break a line like "foo>=1.0"
                # matched both ">=" and ">" and was appended twice.
                break
    return packages
|
def autofit(self):
    """Return |False| if there is a ``<w:tblLayout>`` child with ``w:type``
    attribute set to ``'fixed'``. Otherwise return |True|."""
    layout = self.tblLayout
    # No explicit layout element means Word's default behavior: autofit.
    if layout is None:
        return True
    return layout.type != 'fixed'
|
def jd2dt(jd):
    """Convert a Julian date to a ``datetime``.

    Uses the standard integer JD->Gregorian-calendar algorithm to recover
    year/month/day, then splits the fractional day (JD days start at noon)
    into hours/minutes/seconds/microseconds.

    :param jd: Julian date, float or anything convertible to float
    :return: ``datetime`` corresponding to *jd*
    """
    jd = float(jd)
    n = int(round(jd))
    # Integer arithmetic: proleptic Gregorian calendar conversion.
    a = n + 32044
    b = (4 * a + 3) // 146097
    c = a - (146097 * b) // 4
    d = (4 * c + 3) // 1461
    e = c - (1461 * d) // 4
    m = (5 * e + 2) // 153
    day = e + 1 - (153 * m + 2) // 5
    month = m + 3 - 12 * (m // 10)
    # BUG FIX: original used `m / 10`, which on Python 3 is float division,
    # making `year` a float and datetime(...) raise TypeError.
    year = 100 * b + d - 4800 + m // 10
    # Fractional part of the day relative to midnight.
    tfrac = 0.5 + jd - n
    tfrac_s = 86400.0 * tfrac
    minfrac, hours = np.modf(tfrac_s / 3600.)
    secfrac, minutes = np.modf(minfrac * 60.)
    microsec, seconds = np.modf(secfrac * 60.)
    return datetime(year, month, day, int(hours), int(minutes), int(seconds), int(microsec * 1E6))
|
def restore_snapshot(self, si, logger, session, vm_uuid, resource_fullname, snapshot_name):
    """Restore a virtual machine to a named snapshot.

    :param vim.ServiceInstance si: py_vmomi service instance
    :param logger: Logger
    :param session: CloudShellAPISession
    :type session: cloudshell_api.CloudShellAPISession
    :param vm_uuid: uuid of the virtual machine
    :param resource_fullname:
    :type resource_fullname: str
    :param str snapshot_name: Snapshot name to save the snapshot to
    """
    virtual_machine = self.pyvmomi_service.find_by_uuid(si, vm_uuid)
    logger.info("Revert snapshot")
    snapshot = SnapshotRestoreCommand._get_snapshot(vm=virtual_machine, snapshot_name=snapshot_name)
    # Mark the resource offline first; the VM comes back powered off.
    session.SetResourceLiveStatus(resource_fullname, "Offline", "Powered Off")
    revert_task = snapshot.RevertToSnapshot_Task()
    return self.task_waiter.wait_for_task(task=revert_task, logger=logger, action_name='Revert Snapshot')
|
def get_next(self):
    """Return the next set of objects in a list."""
    # Follow the pagination 'next' link and wrap the response in a List.
    next_url = self._get_link('next')
    resource_cls = self.object_type.get_resource_class(self.client)
    response = resource_cls.perform_api_call(resource_cls.REST_READ, next_url)
    return List(response, self.object_type, self.client)
|
async def update_from_devices(self):
    """Retrieve the list of devices and their values.

    :return: True when the endpoint returned data, False otherwise.
    """
    payload = await self.get_json(URL_DEVICES.format(self._url))
    # An empty/falsy payload is treated as a failed refresh.
    if not payload:
        return False
    self.devices.update_devices(payload)
    return True
|
def warm(self, jittering_ratio=0.2):
    """Progressively load the previous snapshots during the day.

    Loading all the snapshots at once can take a substantial amount of
    time. This method, if called periodically during the day, will
    progressively load those snapshots one by one. Because many workers
    are going to use this method at the same time, we add a jittering to
    the period between loads to avoid hammering the disk at the same time.

    :param jittering_ratio: fraction of ``warm_period`` used as the
        random +/- jitter applied between consecutive loads.
    """
    # First call: discover which snapshot files are recent enough to load.
    if self.snapshot_to_load is None:  # BUG FIX: was `== None` (PEP 8 E711)
        last_period = self.current_period - dt.timedelta(days=self.expiration - 1)
        self.compute_refresh_period()
        self.snapshot_to_load = []
        base_filename = "%s/%s_%s_*.dat" % (self.snapshot_path, self.name, self.expiration)
        availables_snapshots = glob.glob(base_filename)
        for filename in availables_snapshots:
            # The trailing "<YYYY-MM-DD>.dat" part of the name encodes the
            # snapshot period. NOTE(review): .strip('.dat') strips a char
            # set, not a suffix; it is safe here only because dates end in
            # digits.
            snapshot_period = dt.datetime.strptime(filename.split('_')[-1].strip('.dat'), "%Y-%m-%d")
            if snapshot_period >= last_period:
                self.snapshot_to_load.append(filename)
                self.ready = False
    # Load at most one snapshot per call, and only when the jittered
    # schedule says it is time (_should_warm).
    if self.snapshot_to_load and self._should_warm():
        filename = self.snapshot_to_load.pop()
        self._union_bf_from_file(filename)
        # Next load in warm_period +/- (warm_period * jittering_ratio / 2).
        jittering = self.warm_period * (np.random.random() - 0.5) * jittering_ratio
        self.next_snapshot_load = time.time() + self.warm_period + jittering
        if not self.snapshot_to_load:
            self.ready = True
|
def cast(self, mapping):
    """Allocate the scene script a cast of personae for each of its entities.

    :param mapping: A dictionary of {Entity, Persona}
    :return: The SceneScript object.
    """
    # See 'citation' method in
    # http://docutils.sourceforge.net/docutils/parsers/rst/states.py
    for entity, persona in mapping.items():
        self.doc.note_citation(entity)
        self.doc.note_explicit_target(entity, entity)
        entity.persona = persona
        self.log.debug("{0} to be played by {1}".format(entity["names"][0].capitalize(), persona))
    return self
|
def _remove_trustee(self, device):
    '''Remove a trustee from the trust domain.

    Severs trust in both directions: every other device that trusts
    `device` drops it, then `device` drops its own remaining peers, and
    finally `device` is removed from ``self.devices``.

    :param device: ManagementRoot object -- device to remove
    '''
    trustee_name = get_device_info(device).name
    name_object_map = get_device_names_to_objects(self.devices)
    delete_func = self._get_delete_trustee_cmd
    # Pass 1: make every *other* truster stop trusting `device`.
    for truster in self.domain:
        if trustee_name in self.domain[truster] and truster != trustee_name:
            truster_obj = name_object_map[truster]
            self._modify_trust(truster_obj, delete_func, trustee_name)
    # Refresh the domain map so pass 2 sees the updated trust state.
    self._populate_domain()
    # Pass 2: make `device` stop trusting each remaining peer.
    for trustee in self.domain[trustee_name]:
        if trustee_name != trustee:
            self._modify_trust(device, delete_func, trustee)
    self.devices.remove(name_object_map[trustee_name])
|
def _convert_and_assert_per_example_weights_compatible(input_, per_example_weights, dtype):
    """Converts per_example_weights to a tensor and validates the shape."""
    per_example_weights = tf.convert_to_tensor(
        per_example_weights, name='per_example_weights', dtype=dtype)
    weights_shape = per_example_weights.get_shape()
    # Build the error message up front; the expected length is the batch
    # dimension of the input when its rank is known.
    if input_.get_shape().ndims:
        expected_length = input_.get_shape().dims[0]
        message = ('per_example_weights must have rank 1 and length %s, but was: %s'
                   % (expected_length, weights_shape))
    else:
        expected_length = None
        message = ('per_example_weights must have rank 1 and length equal to the '
                   'first dimension of inputs (unknown), but was: %s' % weights_shape)
    # Reject anything that is not rank-1 (or rank-unknown) and anything
    # whose length is provably incompatible with the batch dimension.
    if (weights_shape.ndims not in (1, None)
            or not weights_shape.is_compatible_with((expected_length,))):
        raise ValueError(message)
    return per_example_weights
|
def _Connect(host=None, port=None, user=None, password=None, database=None, client_key_path=None, client_cert_path=None, ca_cert_path=None):
    """Connect to MySQL and check if server fulfills requirements.

    Opens a MySQLdb connection with the given credentials/TLS material and
    runs a series of one-time session checks and settings on a temporary
    cursor before handing the connection back.

    :return: an open MySQLdb connection
    """
    connection_args = _GetConnectionArgs(host=host, port=port, user=user, password=password, database=database, client_key_path=client_key_path, client_cert_path=client_cert_path, ca_cert_path=ca_cert_path)
    conn = MySQLdb.Connect(**connection_args)
    # closing() guarantees the setup cursor is released even if one of the
    # checks below raises. The call order is deliberate (e.g. SQL mode and
    # binlog format before encoding checks) -- do not reorder.
    with contextlib.closing(conn.cursor()) as cursor:
        _CheckForSSL(cursor)
        _SetMariaDBMode(cursor)
        _SetSqlMode(cursor)
        _SetBinlogFormat(cursor)
        _SetPacketSizeForFollowingConnections(cursor)
        _SetEncoding(cursor)
        _CheckConnectionEncoding(cursor)
        _CheckLogFileSize(cursor)
    return conn
|
def load_config_file(config_file: str) -> HmipConfig:
    """Load the config ini file into an HmipConfig.

    :raises FileNotFoundError: when the config file does not exist.
    """
    parser = configparser.ConfigParser()
    # read_file (rather than read) so a missing file raises instead of
    # being silently ignored.
    with open(config_file, "r") as handle:
        parser.read_file(handle)
    log_file = parser.get("LOGGING", "FileName", fallback="hmip.log")
    # The literal string "None" in the ini file disables file logging.
    if log_file == "None":
        log_file = None
    return HmipConfig(
        parser["AUTH"]["AuthToken"],
        parser["AUTH"]["AccessPoint"],
        int(parser.get("LOGGING", "Level", fallback=30)),
        log_file,
        parser._sections,
    )
|
def _collect_masters_map(self, response):
    '''Collect masters map from the network.

    Drains pending datagrams from ``self._socket``, grouping payloads by
    sender address into the caller-supplied ``response`` dict
    ({addr: [data, ...]}), which is mutated in place.

    :return: None
    '''
    while True:
        try:
            data, addr = self._socket.recvfrom(0x400)  # up to 1 KiB per datagram
            if data:
                if addr not in response:
                    response[addr] = []
                response[addr].append(data)
            else:
                break
        except Exception as err:
            # NOTE(review): the loop relies on an exception (presumably a
            # socket timeout) to terminate; an error with nothing collected
            # is logged as a genuine failure. Confirm the socket has a
            # timeout configured by the caller.
            if not response:
                self.log.error('Discovery master collection failure: %s', err)
            break
|
def ok(prompt='OK ', loc={}, glo={}, cmd=""):
    '''Invoke the peforth interpreter.
    A statement: peforth.ok(prompt='OK ', loc=locals(), glo=globals(), cmd="")
    is like a breakpoint. The prompt indicates which breakpoint it is if there are
    many. Arguments loc (locals) and glo (globals) along with the prompt are the
    debuggee's information that is packed as a tuple (loc, glo, prompt) left on TOS
    of the FORTH vm when the breakpoint is called. Replace the loc=locals() with
    loc=dict(locals()) to get a snapshot copy instead of a reference, as well as
    the glo. 'exit' command to stop debugging.
    '''
    # NOTE(review): mutable default arguments ({}) kept as-is to preserve
    # the documented signature; they are only read here.
    if loc or glo:
        # Leave the debuggee's context on TOS for the FORTH vm (parent's data).
        vm.push((loc, glo, prompt))
    while True:
        if cmd == "":
            # Input can be single line (default) or multiple lines.
            # Press Ctrl-D to toggle between the two modes. Place a Ctrl-D
            # before the last <Enter> key to end the input when in
            # multiple-line mode.
            if vm.tick('accept') and not vm.multiple:
                vm.execute('accept')
                cmd = vm.pop().strip()
            elif vm.tick('<accept>') and vm.multiple:
                vm.execute('<accept>')
                cmd = vm.pop().strip()
            else:
                cmd = input("").strip()
        # Pass the command line to the forth VM.
        if cmd == "":
            print(prompt, end="")
            continue
        elif cmd == chr(4):  # Ctrl-D toggles single-/multi-line input mode
            vm.multiple = not vm.multiple
            if not vm.multiple:
                print(prompt, end="")
        else:
            vm.dictate(cmd)
            if vm.multiple:
                vm.multiple = False  # switch back to the normal mode
            print(prompt, end="")
        cmd = ""
        # Master switch vm.exit is a boolean flag. When it's True,
        # exit to the caller that usually is the python interpreter.
        if vm.exit:
            vm.exit = False  # avoid exiting immediately when called again
            break
    return (vm)  # support function cascade
|
def getpreferredencoding():
    """Determine the proper output encoding for terminal rendering.

    :return: encoding name, str
    """
    # Borrowed from Invoke
    # (see https://github.com/pyinvoke/invoke/blob/93af29d/invoke/runners.py#L881)
    _encoding = locale.getpreferredencoding(False)
    # Stdlib version check instead of six.PY2: removes the third-party
    # dependency with identical behavior.
    if sys.version_info[0] == 2 and sys.platform != "win32":
        _default_encoding = locale.getdefaultlocale()[1]
        if _default_encoding is not None:
            _encoding = _default_encoding
    return _encoding
|
def binary_thin(image, strel1, strel2):
    """Morphologically thin an image.

    strel1 - the required values of the pixels in order to survive
    strel2 - at each pixel, the complement of strel1 if we care about the value
    """
    # Pixels matched by the hit-or-miss transform are the ones to delete;
    # keep everything in `image` that was not matched.
    matched = scind.binary_hit_or_miss(image, strel1, strel2)
    keep = np.logical_not(matched)
    return np.logical_and(image, keep)
|
def read(fname):
    """Return content of the specified file.

    :param fname: file name relative to ``SCRIPTDIR``
    :return: file contents as text (decoded as UTF-8 on Python 3)
    """
    path = os.path.join(SCRIPTDIR, fname)
    # BUG FIX: use context managers so the handle is closed even when
    # read() raises (the original leaked the handle on error).
    if PY3:
        with open(path, 'r', encoding='utf8') as f:
            return f.read()
    with open(path, 'r') as f:
        return f.read()
|
def process_forever(self, timeout=0.2):
    """Run an infinite loop, processing data from connections.

    This method repeatedly calls process_once.

    Arguments:
        timeout -- Parameter to pass to process_once.
    """
    # This loop should specifically *not* be mutex-locked; otherwise no
    # other thread would ever be able to change the shared state of a
    # Reactor object running this function.
    log.debug("process_forever(timeout=%s)", timeout)
    step = functools.partial(self.process_once, timeout=timeout)
    consume(infinite_call(step))
|
def link_asset_content_key(access_token, asset_id, encryptionkey_id, ams_redirected_rest_endpoint):
    '''Link Media Service Asset and Content Key.

    Args:
        access_token (str): A valid Azure authentication token.
        asset_id (str): A Media Service Asset ID.
        encryptionkey_id (str): A Media Service Encryption Key ID.
        ams_redirected_rest_endpoint (str): A Media Service Redirected Endpoint.

    Returns:
        HTTP response. JSON body.
    '''
    # OData path linking the asset to its content keys.
    full_path = "/Assets('{0}')/$links/ContentKeys".format(asset_id)
    full_path_encoded = urllib.parse.quote(full_path, safe='')
    endpoint = ams_rest_endpoint + full_path_encoded
    uri = "{0}ContentKeys('{1}')".format(ams_redirected_rest_endpoint, encryptionkey_id)
    body = '{"uri": "' + uri + '"}'
    return do_ams_post(endpoint, full_path_encoded, body, access_token)
|
def link(self, family: str, sample: str, analysis_type: str, files: List[str]):
    """Link FASTQ files for a sample.

    Creates ``<families_dir>/<family>/<analysis_type>/<sample>/fastq`` and
    symlinks each FASTQ file into it under its canonical name.
    """
    fastq_dir = Path(self.families_dir) / family / analysis_type / sample / 'fastq'
    fastq_dir.mkdir(parents=True, exist_ok=True)
    for fastq_data in files:
        source = Path(fastq_data['path'])
        link_name = self.name_file(
            lane=fastq_data['lane'],
            flowcell=fastq_data['flowcell'],
            sample=sample,
            read=fastq_data['read'],
            undetermined=fastq_data['undetermined'],
        )
        target = fastq_dir / link_name
        if target.exists():
            # Idempotent: never clobber an existing link/file.
            log.debug(f"destination path already exists: {target}")
        else:
            log.info(f"linking: {source} -> {target}")
            target.symlink_to(source)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.