signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def dispatchEvent(self, event, *args):
    """Fire every callback registered for *event*, yielding each result.

    Intended to be called by derivative classes. Each callback receives the
    event name, this emitter, and any extra positional arguments.

    :param event: key into ``self.listeners`` naming the event to fire
    :param args: additional arguments passed through to each callback
    :returns: generator over the callbacks' return values
    """
    # Kept as a generator so callbacks run lazily, one per iteration.
    for registered in self.listeners[event]:
        yield registered(event, self, *args)
|
def validate_sum(parameter_container, validation_message, **kwargs):
    """Validate that the sum of the parameter values stays within a threshold.

    :param parameter_container: The container that uses this validator.
    :type parameter_container: ParameterContainer
    :param validation_message: The message shown if validation fails.
    :type validation_message: str
    :param kwargs: Keyword arguments; ``max`` overrides the threshold
        (default 1).
    :type kwargs: dict
    :returns: Dictionary with ``valid`` (bool) and ``message`` (str) keys.
    :rtype: dict
    """
    # Improvement over the original: the two branches (with / without None
    # entries) computed the same check after None-filtering, and the old
    # `values.remove(None)` was a dead mutation of a local that was never
    # read again. Unified into a single filtered-sum comparison.
    parameters = parameter_container.get_parameters(False)
    values = [
        parameter.value
        for parameter in parameters
        if parameter.selected_option_type() in [SINGLE_DYNAMIC, STATIC]
    ]
    sum_threshold = kwargs.get('max', 1)
    # None entries are unknown contributions: only the known values are
    # required to stay within the threshold.
    if sum(x for x in values if x is not None) > sum_threshold:
        return {'valid': False, 'message': validation_message}
    return {'valid': True, 'message': ''}
|
def make_slack_blueprint(client_id=None, client_secret=None, scope=None, redirect_url=None, redirect_to=None, login_url=None, authorized_url=None, session_class=None, storage=None):
    """Make a blueprint for authenticating with Slack using OAuth 2.

    This requires a client ID and client secret from Slack. You should
    either pass them to this constructor, or make sure that your Flask
    application config defines them, using the variables
    :envvar:`SLACK_OAUTH_CLIENT_ID` and :envvar:`SLACK_OAUTH_CLIENT_SECRET`.

    Args:
        client_id (str): The client ID for your application on Slack.
        client_secret (str): The client secret for your application on Slack.
        scope (str, optional): comma-separated list of scopes for the OAuth
            token.
        redirect_url (str): the URL to redirect to after the authentication
            dance is complete.
        redirect_to (str): if ``redirect_url`` is not defined, the name of
            the view to redirect to after the authentication dance is
            complete. The actual URL will be determined by
            :func:`flask.url_for`.
        login_url (str, optional): the URL path for the ``login`` view.
            Defaults to ``/slack``.
        authorized_url (str, optional): the URL path for the ``authorized``
            view. Defaults to ``/slack/authorized``.
        session_class (class, optional): The class to use for creating a
            Requests session. Defaults to
            :class:`~flask_dance.consumer.requests.OAuth2Session`.
        storage: A token storage class, or an instance of a token storage
            class, to use for this blueprint. Defaults to
            :class:`~flask_dance.consumer.storage.session.SessionStorage`.

    :rtype: :class:`~flask_dance.consumer.OAuth2ConsumerBlueprint`
    :returns: A :ref:`blueprint <flask:blueprints>` to attach to your Flask
        app.
    """
    if not scope:
        scope = ["identify", "chat:write:bot"]
    slack_bp = SlackBlueprint(
        "slack",
        __name__,
        client_id=client_id,
        client_secret=client_secret,
        scope=scope,
        base_url="https://slack.com/api/",
        authorization_url="https://slack.com/oauth/authorize",
        token_url="https://slack.com/api/oauth.access",
        redirect_url=redirect_url,
        redirect_to=redirect_to,
        login_url=login_url,
        authorized_url=authorized_url,
        session_class=session_class,
        storage=storage,
    )
    # Fall back to app config for credentials that were not passed directly.
    slack_bp.from_config["client_id"] = "SLACK_OAUTH_CLIENT_ID"
    slack_bp.from_config["client_secret"] = "SLACK_OAUTH_CLIENT_SECRET"

    @slack_bp.before_app_request
    def set_applocal_session():
        # Expose this blueprint's OAuth session on the app context stack.
        ctx = stack.top
        ctx.slack_oauth = slack_bp.session

    return slack_bp
|
def trades(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the trades JSON from this instance's Horizon server.

    Retrieves the trades JSON response for the account associated with
    this :class:`Address`.

    :param cursor: A paging token, specifying where to start returning
        records from. When streaming this can be set to "now" to stream
        objects created since your request time.
    :type cursor: int, str
    :param str order: The order in which to return rows, "asc" or "desc".
    :param int limit: Maximum number of records to return.
    :param bool sse: Use the SSE client for connecting to Horizon.
    """
    query = dict(cursor=cursor, order=order, limit=limit, sse=sse)
    return self.horizon.account_trades(self.address, **query)
|
def get_requirement_warn(self, line):
    """Return a ``LogItem`` for a test case that failed to import.

    :param line: one log line to scan
    :returns: ``LogItem`` with the captured name, or ``None`` when the line
        does not match or the expected capture group is absent
    """
    match = self.REQ_WARN_SEARCH.search(line)
    if match is None:
        return None
    try:
        return LogItem(match.group(1), None, None)
    except IndexError:
        return None
|
def assignmentComplete():
    """ASSIGNMENT COMPLETE Section 9.1.3

    Builds the layered packet: protocol discriminator / message type /
    RR cause.
    """
    header = TpPd(pd=0x6)
    # 0x29 == 0b00101001: the ASSIGNMENT COMPLETE message type
    msg_type = MessageType(mesType=0x29)
    cause = RrCause()
    return header / msg_type / cause
|
def _reset_docs(self):
    """Helper to clear the docs on RESET or filter mismatch."""
    _LOGGER.debug("resetting documents")
    self.change_map.clear()
    self.resume_token = None
    # Mark every known document as deleted; any document that still exists
    # will be re-sent by the server.
    for snapshot in self.doc_tree.keys():
        self.change_map[snapshot.reference._document_path] = ChangeType.REMOVED
    self.current = False
|
def load(pathtovector, wordlist=(), num_to_load=None, truncate_embeddings=None, unk_word=None, sep=" "):
    r"""Read a file in word2vec .txt format.

    The load function will raise a ValueError when trying to load items
    which do not conform to line lengths.

    Parameters
    ----------
    pathtovector : string
        The path to the vector file.
    wordlist : iterable, optional, default ()
        A list of words you want loaded from the vector file. If this is
        empty (default), all words will be loaded.
    num_to_load : int, optional, default None
        The number of items to load from the file. Because loading can take
        some time, it is sometimes useful to only load the first n items
        from a vector file for quick inspection.
    truncate_embeddings : int, optional, default None
        If this value is not None, the vectors in the vector space will
        be truncated to the number of dimensions indicated by this value.
    unk_word : object
        The object to treat as UNK in your vector space. If this is not
        in your items dictionary after loading, we add it with a zero
        vector.
    sep : str, optional, default " "
        The field separator used in the vector file.

    Returns
    -------
    r : Reach
        An initialized Reach instance.
    """
    vectors, items = Reach._load(pathtovector, wordlist, num_to_load, truncate_embeddings, sep)
    if unk_word is not None:
        if unk_word not in set(items):
            # UNK not present: prepend a zero vector and put UNK at index 0.
            unk_vec = np.zeros((1, vectors.shape[1]))
            vectors = np.concatenate([unk_vec, vectors], 0)
            items = [unk_word] + items
            unk_index = 0
        else:
            unk_index = items.index(unk_word)
    else:
        unk_index = None
    return Reach(vectors, items, name=os.path.split(pathtovector)[-1], unk_index=unk_index)
|
def _extract_array ( self , kwargs_list ) :
"""inverse of _ update _ kwargs
: param kwargs _ list :
: return :"""
|
if self . _solver_type == 'PROFILE_SHEAR' :
e1 = kwargs_list [ 1 ] [ 'e1' ]
e2 = kwargs_list [ 1 ] [ 'e2' ]
phi_ext , gamma_ext = param_util . ellipticity2phi_gamma ( e1 , e2 )
else :
phi_ext = 0
lens_model = self . _lens_mode_list [ 0 ]
if lens_model in [ 'SPEP' , 'SPEMD' , 'SIE' , 'NIE' ] :
e1 = kwargs_list [ 0 ] [ 'e1' ]
e2 = kwargs_list [ 0 ] [ 'e2' ]
center_x = kwargs_list [ 0 ] [ 'center_x' ]
center_y = kwargs_list [ 0 ] [ 'center_y' ]
theta_E = kwargs_list [ 0 ] [ 'theta_E' ]
x = [ theta_E , e1 , e2 , center_x , center_y , phi_ext ]
elif lens_model in [ 'NFW_ELLIPSE' ] :
e1 = kwargs_list [ 0 ] [ 'e1' ]
e2 = kwargs_list [ 0 ] [ 'e2' ]
center_x = kwargs_list [ 0 ] [ 'center_x' ]
center_y = kwargs_list [ 0 ] [ 'center_y' ]
theta_Rs = kwargs_list [ 0 ] [ 'theta_Rs' ]
x = [ theta_Rs , e1 , e2 , center_x , center_y , phi_ext ]
elif lens_model in [ 'SHAPELETS_CART' ] :
coeffs = list ( kwargs_list [ 0 ] [ 'coeffs' ] )
[ c10 , c01 , c20 , c11 , c02 ] = coeffs [ 1 : 6 ]
x = [ c10 , c01 , c20 , c11 , c02 , phi_ext ]
else :
raise ValueError ( "Lens model %s not supported for 4-point solver!" % lens_model )
return x
|
def control(self, on=[], off=[]):
    """Primary interaction point to the controls interface.

    The 'on' and 'off' arguments can either be a list or a single string,
    allowing both individual device control and batch controls. Both are
    optional; unknown device names are silently ignored.

    Usage::

        ctrlobj.control(off="all")                 # turn everything off
        ctrlobj.control(on="all")                  # turn everything on
        ctrlobj.control(on=["light", "fan"])       # turn on light and fan
        ctrlobj.control(on="light", off="fan")     # mix on and off

    :returns: the result of :meth:`update` after applying the changes
    """
    known = {"light", "valve", "fan", "pump"}

    def normalize(arg):
        # A bare string is either the "all" shortcut or one device name;
        # anything else is treated as an iterable of device names.
        if type(arg) is str:
            return known if arg == "all" else {arg} & known
        return set(arg) & known

    for device in normalize(on):
        self.manage(device, "on")
    for device in normalize(off):
        self.manage(device, "off")
    # Small forced delay to throttle requests before polling state.
    sleep(.01)
    return self.update()
|
def asyncPipeItembuilder(context=None, _INPUT=None, conf=None, **kwargs):
    """A source that asynchronously builds an item. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : asyncPipe-like object (twisted Deferred iterable of items)
    conf : dict, e.g. ::

        {'attrs': [
            {'key': {'value': 'title'}, 'value': {'value': 'new title'}},
            {'key': {'value': 'desc.content'}, 'value': {'value': 'new desc'}}]}

    Returns
    -------
    _OUTPUT : twisted.internet.defer.Deferred generator of items
    """
    # NOTE(review): generator using `yield` + `returnValue` — presumably
    # wrapped with twisted's @inlineCallbacks at the decoration site;
    # confirm against the module's other async pipes.
    pkwargs = cdicts(opts, kwargs)
    # Build the per-attribute splitter functions asynchronously.
    asyncFuncs = yield asyncGetSplits(None, conf['attrs'], **pkwargs)
    _input = yield _INPUT
    # Guard against infinite input streams before mapping over the items.
    finite = utils.finitize(_input)
    inputs = imap(DotDict, finite)
    pieces = yield asyncImap(asyncFuncs[0], inputs)
    results = imap(utils.parse_params, pieces)
    _OUTPUT = imap(DotDict, results)
    returnValue(_OUTPUT)
|
def start_listener(self):
    """Start listening for packets on the configured UDP port."""
    # Drop any previously open socket before rebinding.
    if self.sock is not None:
        self.sock.close()
    self.sock = sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('', self.asterix_settings.port))
    # Non-blocking so reads can be polled from the main loop.
    sock.setblocking(False)
    print("Started on port %u" % self.asterix_settings.port)
|
def dump(self, f):
    """Dump data to a file.

    :param f: file-like object or path to file
    :type f: file or str
    """
    # Validate before touching the output so a bad model never produces a
    # partially written file.
    self.validate()
    with _open_file_obj(f, "w") as out:
        parser = self._get_parser()
        self.serialize(parser)
        self.build_file(parser, out)
|
def get_projects(self, **kwargs):
    """Get a user's projects.

    :param str login: user's login (default: ``self._login``)
    :return: decoded JSON response
    """
    login = kwargs.get('login', self._login)
    response = self._request_api(url=SEARCH_URL.format(login=login))
    return response.json()
|
def get_index(self, value):
    """Return the index (or indices) of the given value (or values) in
    `state_values`.

    Parameters
    ----------
    value
        Value(s) to get the index (indices) for.

    Returns
    -------
    idx : int or ndarray(int)
        Index of `value` if `value` is a single state value; array of
        indices if `value` is an array_like of state values.
    """
    # When no explicit state values exist, states are treated as scalars.
    ndim = 1 if self.state_values is None else self.state_values.ndim
    values = np.asarray(value)
    if values.ndim < ndim:
        return self._get_index(value)
    if values.ndim == ndim:
        # An array of state values: resolve each one individually.
        return np.array([self._get_index(v) for v in values], dtype=int)
    raise ValueError('invalid value')
|
def validator(flag_name, message='Flag validation failed', flag_values=_flagvalues.FLAGS):
    """A function decorator for defining a flag validator.

    Registers the decorated function as a validator for flag_name, e.g. ::

        @flags.validator('foo')
        def _CheckFoo(foo):
            ...

    See register_validator() for the specification of checker function.

    Args:
        flag_name: str, name of the flag to be checked.
        message: str, error text to be shown to the user if checker returns
            False. If checker raises flags.ValidationError, message from the
            raised error will be shown.
        flag_values: flags.FlagValues, optional FlagValues instance to
            validate against.

    Returns:
        A function decorator that registers its function argument as a
        validator.

    Raises:
        AttributeError: Raised when flag_name is not registered as a valid
            flag name.
    """
    def wrapper(checker):
        # Registration is the only effect; the checker is returned unchanged.
        register_validator(flag_name, checker, message=message, flag_values=flag_values)
        return checker
    return wrapper
|
def crop(self, vector, resolution=None, masked=None, bands=None, resampling=Resampling.cubic):
    """Crop the raster outside the vector (convex hull).

    :param vector: GeoVector, GeoFeature, FeatureCollection
    :param resolution: output resolution, None for full resolution
    :param resampling: reprojection resampling method, default `cubic`
    :return: GeoRaster
    """
    # Without a loaded image we have to work boundless from the source.
    boundless = self._image is None
    bounds, window = self._vector_to_raster_bounds(vector.envelope, boundless=boundless)
    xsize, ysize = (None, None)
    if resolution:
        xsize, ysize = self._resolution_to_output_shape(bounds, resolution)
    return self.pixel_crop(bounds, xsize, ysize, window=window, masked=masked, bands=bands, resampling=resampling)
|
def get_address_transactions(self, account_id, address_id, **params):
    """https://developers.coinbase.com/api/v2#list-address39s-transactions"""
    segments = ('v2', 'accounts', account_id, 'addresses', address_id, 'transactions')
    response = self._get(*segments, params=params)
    return self._make_api_object(response, Transaction)
|
def _register_server(self, server, timeout=30):
    """Register a new SiriDB Server.

    This method is used by the SiriDB manage tool and should not be used
    otherwise. Full access rights are required for this request.
    """
    request = self._protocol.send_package(CPROTO_REQ_REGISTER_SERVER, data=server, timeout=timeout)
    # Block the caller until the server acknowledges the registration.
    return self._loop.run_until_complete(request)
|
def is_name_zonefile_hash(name, zonefile_hash, hostport=None, proxy=None):
    """Determine if a name set a given zone file hash.

    Return {'result': True/False} if so
    Return {'error': ...} on error

    :param name: the name to look up
    :param zonefile_hash: the zone file hash to test for
    :param hostport: 'host:port' of a node to contact; used to build a proxy
        when ``proxy`` is not given
    :param proxy: an already-connected node proxy; at least one of
        ``hostport``/``proxy`` must be supplied
    """
    assert hostport or proxy, 'Need hostport or proxy'
    if proxy is None:
        proxy = connect_hostport(hostport)
    # The node's answer must be a JSON object with a boolean 'result' field.
    zonefile_check_schema = {'type': 'object', 'properties': {'result': {'type': 'boolean'}}, 'required': ['result']}
    schema = json_response_schema(zonefile_check_schema)
    resp = {}
    try:
        resp = proxy.is_name_zonefile_hash(name, zonefile_hash)
        resp = json_validate(schema, resp)
        if json_is_error(resp):
            return resp
    except ValidationError as e:
        # Schema mismatch: the remote node speaks a different protocol version.
        if BLOCKSTACK_DEBUG:
            log.exception(e)
        resp = {'error': 'Server response did not match expected schema. You are likely communicating with an out-of-date Blockstack node.', 'http_status': 502}
        return resp
    except socket.timeout:
        log.error("Connection timed out")
        resp = {'error': 'Connection to remote host timed out.', 'http_status': 503}
        return resp
    except socket.error as se:
        log.error("Connection error {}".format(se.errno))
        resp = {'error': 'Connection to remote host failed.', 'http_status': 502}
        return resp
    except Exception as ee:
        # Catch-all boundary: log and convert to an error response.
        if BLOCKSTACK_DEBUG:
            log.exception(ee)
        log.error("Caught exception while connecting to Blockstack node: {}".format(ee))
        resp = {'error': 'Failed to contact Blockstack node. Try again with `--debug`.', 'http_status': 500}
        return resp
    # Success: strip everything but the boolean result.
    return {'result': resp['result']}
|
def save_share_list(self, url, path, password=None, filter_callback=None):
    """Save a shared file list into the user's own netdisk storage.

    Supports password-protected shares and an optional callback for
    filtering which files are saved.

    :param url: URL of the share
    :type url: str
    :param path: destination directory in the user's own netdisk
    :type path: str
    :param password: share password; leave unset if the share has none
    :type password: str
    :param filter_callback: called as ``filter_callback(file)`` for each
        entry, where ``file`` is a dict with ``filename``, ``size`` and
        ``isdir`` keys; entries for which it returns a falsy value are
        skipped
    :return: on success, the server response with ``errno`` 0; otherwise a
        dict containing the files that were collected, or an error dict
        with ``errno`` and ``error_msg``

    Implementation note: the share page's HTML embeds a JSON blob via
    ``yunData.setData({...})`` holding the share context; the fields used
    here are ``file_list``, ``uk`` and ``shareid``.
    """
    # Whether the link is short or long, a password-protected share gets
    # redirected to the long form (from which surl can be extracted); a
    # share without a password does not need shareid/uk at this point.
    respond = self._request(None, url=url)
    target_url = respond.url
    surl = re.search(r"surl=([a-zA-Z\d]+)", target_url)
    if surl is None:
        # NOTE(review): the `[1:]` drops the first character of the id
        # captured after "s/" — verify this is intentional for /s/1xxxx
        # style links where the leading "1" is not part of the surl.
        surl = re.search(r"s/([a-zA-Z\d]+)", target_url).group(1)[1:]
    else:
        surl = surl.group(1)
    data = {"pwd": password, "t": str(int(time.time())), }
    url = "http://pan.baidu.com/share/verify?surl=" + surl
    verify_result = json.loads(self._request(None, data=data, url=url).content)
    if not verify_result or verify_result['errno'] != 0:
        return verify_result
    # (A commented-out alternative that extracted shareid/uk from the URL
    # and called _verify_shared_file lived here in the original.)
    # Parse the file list out of the HTML; shareid and uk come along too.
    html = self._request(None, url=target_url).text
    r = re.compile(r".*yunData\.setData\((.*?)\);.*")
    m = r.search(html)
    if m:
        context = json.loads(m.group(1))
        file_list = context['file_list']['list']
        uk = str(context['uk'])
        shareid = str(context['shareid'])
        ret = {"filelist": []}
        for f in file_list:
            file_obj = {'filename': f['server_filename'], 'size': f['size'], 'isdir': f['isdir']}
            if not filter_callback or filter_callback(file_obj):
                ret['filelist'].append(f['path'])
        save_share_file_ret = self._save_shared_file_list(shareid, uk, path, ret['filelist'])
        if save_share_file_ret and save_share_file_ret['errno'] == 0:
            return save_share_file_ret
        else:
            return ret
    else:
        # Failed to obtain the file list from the page.
        return {"errno": - 1, "error_msg": "PCS.save_share_list failed, mayby url is incorrect!"}
|
def p_int(self, tree):
    '''V ::= INTEGER'''
    # NOTE: the docstring above is the grammar production consumed by the
    # parser generator — it must not be altered.
    raw = tree.attr
    tree.value = int(raw)
    tree.svalue = raw
|
def read(self, length, timeout_ms=0, blocking=False):
    """Read an Input report from a HID device with timeout.

    Input reports are returned to the host through the `INTERRUPT IN`
    endpoint. The first byte will contain the Report number if the device
    uses numbered reports.

    By default reads are non-blocking, i.e. the method will return `None`
    if no data was available. Blocking reads can be enabled with
    :param blocking:. Additionally, a timeout for the read can be
    specified.

    :param length: The number of bytes to read. For devices with multiple
        reports, make sure to read an extra byte for the report number.
    :param timeout_ms: Timeout in milliseconds
    :type timeout_ms: int
    :param blocking: Block until data is available
    """
    self._check_device_status()
    buf = ffi.new("unsigned char[]", length)
    # hidapi treats -1 as "block indefinitely".
    if blocking and not timeout_ms:
        timeout_ms = -1
    if timeout_ms:
        nread = hidapi.hid_read_timeout(self._device, buf, length, timeout_ms)
    else:
        nread = hidapi.hid_read(self._device, buf, length)
    if nread == -1:
        raise IOError("Failed to read from HID device: {0}".format(self._get_last_error_string()))
    if nread == 0:
        return None
    return ffi.buffer(buf, nread)[:]
|
def get_now_datetime_filestamp(longTime=False):
    """*A datetime stamp to be appended to the end of filenames: ``YYYYMMDDtHHMMSS``*

    **Key Arguments:**
        - ``longTime`` -- append microseconds (better chance of filenames
          being unique)

    **Return:**
        - ``now`` -- current time and date in filename format

    **Usage:**

    .. code-block:: python

        from fundamentals.download import get_now_datetime_filestamp
        get_now_datetime_filestamp(longTime=False)
        # Out: '20160316t154635'
        get_now_datetime_filestamp(longTime=True)
        # Out: '20160316t154644133638'
    """
    from datetime import datetime
    fmt = "%Y%m%dt%H%M%S%f" if longTime else "%Y%m%dt%H%M%S"
    return datetime.now().strftime(fmt)
|
def _remove_leading_dots_for_smtp_transparency_support ( self , input_data ) :
"""Uses the input data to recover the original payload ( includes
transparency support as specified in RFC 821 , Section 4.5.2 ) ."""
|
regex = re . compile ( '^\.\.' , re . MULTILINE )
data_without_transparency_dots = regex . sub ( '.' , input_data )
return re . sub ( '\r\n' , '\n' , data_without_transparency_dots )
|
def ends_at(self, time_point):
    """Returns ``True`` if this interval ends at the given time point.

    :param time_point: the time point to test
    :type time_point: :class:`~aeneas.exacttiming.TimeValue`
    :raises TypeError: if ``time_point`` is not an instance of ``TimeValue``
    :rtype: bool
    """
    if isinstance(time_point, TimeValue):
        return self.end == time_point
    raise TypeError(u"time_point is not an instance of TimeValue")
|
def distance_sphere(self, other, radius=6371.0):
    '''-- Deprecated in v0.70. Use distance(other, ellipse='sphere') instead --

    Returns the great circle distance between two lat/lon coordinates on a
    sphere, computed via the spherical law of cosines. The default radius
    corresponds to the FAI sphere with units in km.
    '''
    warnings.warn("Deprecated in v0.70. Use distance(other, ellipse = 'sphere') instead", DeprecationWarning)
    deg2rad = math.pi / 180.
    # Colatitudes (90 - latitude), in radians.
    phi1 = (90. - self.lat.decimal_degree) * deg2rad
    phi2 = (90. - other.lat.decimal_degree) * deg2rad
    # Longitudes, in radians.
    theta1 = self.lon.decimal_degree * deg2rad
    theta2 = other.lon.decimal_degree * deg2rad
    cos_arc = (math.sin(phi1) * math.sin(phi2) * math.cos(theta1 - theta2)
               + math.cos(phi1) * math.cos(phi2))
    return math.acos(cos_arc) * radius
|
def list(context, resource, **kwargs):
    """List all resources.

    Pops ``id`` and ``subresource`` out of the sanitized kwargs to build
    the URI; everything else is passed along as query parameters.
    """
    data = utils.sanitize_kwargs(**kwargs)
    id = data.pop('id', None)
    subresource = data.pop('subresource', None)
    if subresource:
        uri = f'{context.dci_cs_api}/{resource}/{id}/{subresource}'
    else:
        uri = f'{context.dci_cs_api}/{resource}'
    return context.session.get(uri, timeout=HTTP_TIMEOUT, params=data)
|
def post(self):
    """Register a new model (models)."""
    self.set_header("Content-Type", "application/json")
    metadata = json.loads(self.request.body.decode())
    # Assign a fresh identifier, store it on the record, and index by it.
    key = uuid.uuid4().hex
    metadata["uuid"] = key
    self.database[key] = metadata
    self.write(json.dumps({"uuid": key}))
|
def plot_related_data(x, y, code, ylabel, fileName, options):
    """Plot Z1 and Z2 in function of IBS2* ratio.

    :param x: the x axis of the plot (``IBS2 ratio``).
    :param y: the y axis of the plot (either ``z1`` or ``z2``).
    :param code: the code of the relatedness of each sample pair.
    :param ylabel: the label of the y axis (either ``z1`` or ``z2``).
    :param fileName: the name of the output file.
    :param options: the options.

    :type x: numpy.array of floats
    :type y: numpy.array of floats
    :type code: numpy.array
    :type ylabel: str
    :type fileName: str
    :type options: argparse.Namespace

    There are four different relation codes (represented by 4 different
    colors in the plots):

    ==== =============================================== ===========
    Code Relation                                        Color
    ==== =============================================== ===========
    1    Full-sibs                                       ``#CC0000``
    2    Half-sibs or Grand-parent-Child or Uncle-Nephew ``#0099CC``
    3    Parent-Child                                    ``#FF8800``
    4    Twins or Duplicated samples                     ``#9933CC``
    ==== =============================================== ===========

    Sample pairs with unknown relation are plotted using ``#669900`` as
    color.
    """
    import matplotlib as mpl
    # Force the non-interactive Agg backend so the figure can be rendered
    # without a display (file output only).
    if mpl.get_backend() != "agg":
        mpl.use("Agg")
    import matplotlib.pyplot as plt
    plt.ioff()
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # Setting the title, the X and Y label
    ax.set_title((r"%d pairs with $IBS2^\ast_{ratio} >$ " r"%f" % (len(code), options.ibs2_ratio)))
    ax.set_xlabel(r"$IBS2^\ast_{ratio}$")
    ax.set_ylabel(ylabel)
    # Plotting the data (there are 5 relation codes). Unknown pairs ("5")
    # are drawn first so the known relations end up on top.
    c5, = ax.plot(x[code == "5"], y[code == "5"], "o", ms=3, mec="#669900", mfc="#669900")
    c1, = ax.plot(x[code == "1"], y[code == "1"], "o", ms=3, mec="#CC0000", mfc="#CC0000")
    c2, = ax.plot(x[code == "2"], y[code == "2"], "o", ms=3, mec="#0099CC", mfc="#0099CC")
    c3, = ax.plot(x[code == "3"], y[code == "3"], "o", ms=3, mec="#FF8800", mfc="#FF8800")
    c4, = ax.plot(x[code == "4"], y[code == "4"], "o", ms=3, mec="#9933CC", mfc="#9933CC")
    # The legend (counts per relation code are embedded in the labels)
    prop = mpl.font_manager.FontProperties(size=8)
    leg = ax.legend([c1, c2, c3, c4, c5], ["Full sibs (n={})".format(np.sum(code == "1")), ("Half sibs, grand-parent-child or uncle-nephew " "(n={})".format(np.sum(code == "2"))), "Parent-child (n={})".format(np.sum(code == "3")), ("Twins or duplicated samples " "(n={})".format(np.sum(code == "4"))), "Unknown (n={})".format(np.sum(code == "5"))], loc="best", numpoints=1, fancybox=True, prop=prop)
    leg.get_frame().set_alpha(0.5)
    # Setting the limits
    ax.set_xlim((options.ibs2_ratio - 0.01, 1.01))
    ax.set_ylim((- 0.01, 1.01))
    # Modifying the spines
    ax.xaxis.set_ticks_position("bottom")
    ax.yaxis.set_ticks_position("left")
    ax.spines["top"].set_visible(False)
    ax.spines["right"].set_visible(False)
    # Saving the figure
    plt.savefig(fileName)
|
def next_batch(self, n=1):
    """Return the next requests that should be dispatched.

    Takes up to ``n`` entries from the end of the queue (newest first)
    and removes them from the queue.
    """
    if not self.queue:
        return []
    batch = self.queue[-n:][::-1]
    self.queue = self.queue[:-n]
    return batch
|
def model_data(self):
    """str: The model location in S3. Only set if Estimator has been ``fit()``."""
    if self.latest_training_job is not None:
        desc = self.sagemaker_session.sagemaker_client.describe_training_job(
            TrainingJobName=self.latest_training_job.name)
        model_uri = desc['ModelArtifacts']['S3ModelArtifacts']
    else:
        # BUGFIX: the two implicitly-concatenated literals were missing a
        # separating space, producing "...make surethis estimator...".
        logging.warning('No finished training job found associated with this estimator. '
                        'Please make sure this estimator is only used for building workflow config')
        # Fall back to the deterministic output location the job would use.
        model_uri = os.path.join(self.output_path, self._current_job_name, 'output', 'model.tar.gz')
    return model_uri
|
def read_file(self, infile):
    """Read a reST file into a string.

    Exits via ``err_exit`` with a readable message on decode or I/O errors.
    """
    try:
        with open(infile, 'rt') as stream:
            return stream.read()
    except UnicodeDecodeError as e:
        err_exit('Error reading %s: %s' % (infile, e))
    except (IOError, OSError) as e:
        # Prefer the short strerror when the OS provides one.
        err_exit('Error reading %s: %s' % (infile, e.strerror or e))
|
def save_devices(self):
    """Save devices that have been obtained from the LaMetric cloud
    to a local file."""
    # BUGFIX: the original format string had no '{}' placeholder, so the
    # filename passed to .format() was silently dropped from the log line.
    log.debug("saving devices to '{}'...".format(self._devices_filename))
    # Only write when there is something to persist.
    if self._devices != []:
        with codecs.open(self._devices_filename, "wb", "utf-8") as f:
            json.dump(self._devices, f)
|
def scatter(*args, **kwargs):
    """Plot a scatterplot of x and y, iterating over the ColorBrewer
    "Set2" color cycle unless a color is specified.

    The symbols produced are empty circles, with the outline in the color
    specified by either 'color' or 'edgecolor'. If you want to fill the
    circle, specify 'facecolor'.

    Besides the matplotlib scatter() parameters, also accepts:

    @param show_ticks: Whether or not to show the x and y axis ticks
    """
    # Force 'color' to indicate the edge color, so the middle of the
    # scatter patches are empty.
    ax, args, kwargs = utils.maybe_get_ax(*args, **kwargs)
    if 'color' not in kwargs:
        # No explicit color: take the next one from the axes' line color
        # cycle. NOTE(review): `ax._get_lines.color_cycle` is a private
        # matplotlib attribute that newer releases removed — confirm the
        # pinned matplotlib version still provides it.
        color_cycle = ax._get_lines.color_cycle
        kwargs['color'] = next(color_cycle)
    kwargs.setdefault('edgecolor', almost_black)
    kwargs.setdefault('alpha', 0.5)
    lw = utils.maybe_get_linewidth(**kwargs)
    kwargs['lw'] = lw
    # Pop our custom kwarg before handing the rest to matplotlib.
    show_ticks = kwargs.pop('show_ticks', False)
    scatterpoints = ax.scatter(*args, **kwargs)
    utils.remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
    return scatterpoints
|
def parse_param_signature(sig):
    """Split a parameter signature ``type name (= default)?`` into parts.

    Returns a ``ParamTuple(name, typ, default, modifiers)``; raises
    ``RuntimeError`` when the signature does not match the grammar.
    """
    matched = PARAM_SIG_RE.match(sig.strip())
    if matched is None:
        raise RuntimeError('Parameter signature invalid, got ' + sig)
    groups = matched.groups()
    typ, name, _, default = groups[-4:]
    return ParamTuple(name=name, typ=typ, default=default,
                      modifiers=groups[0].split())
|
async def probe(self):
    """Probe for devices on every adapter that supports probing.

    Adapters advertise support through their 'probe_supported' config
    flag; unsupported adapters are skipped.  Each supporting adapter will
    send a notification for all devices it has seen.
    See :meth:`AbstractDeviceAdapter.probe`.
    """
    for adapter in self.adapters:
        if not adapter.get_config('probe_supported', False):
            continue
        await adapter.probe()
|
def alter_old_distutils_request(request: WSGIRequest):
    """Alter the request body for compatibility with older distutils clients.

    Due to a bug in the Python distutils library, the request post is sent
    using ``\\n`` as a separator instead of the ``\\r\\n`` that the HTTP spec
    demands.  This breaks the Django form parser and therefore we have to
    rewrite the multipart body with proper separators before Django parses
    it.

    This bug was fixed in Python 2.7.4 and 3.4:
    http://bugs.python.org/issue10510
    """
    # We first need to retrieve the body before accessing POST or FILES since
    # it can only be read once.
    body = request.body
    # If Django already managed to parse POST/FILES, the client used proper
    # separators and nothing needs rewriting.
    if request.POST or request.FILES:
        return
    new_body = BytesIO()
    # Split the body into its multipart sections based on the boundary string
    # from the Content-Type header.
    content_type, opts = parse_header(request.META['CONTENT_TYPE'].encode('ascii'))
    parts = body.split(b'\n--' + opts['boundary'] + b'\n')
    for part in parts:
        # A valid part has a header block and a content block separated by a
        # blank line; skip anything else (e.g. the preamble).
        if b'\n\n' not in part:
            continue
        headers, content = part.split(b'\n\n', 1)
        if not headers:
            continue
        # Re-emit the part with CRLF separators as the HTTP spec requires.
        new_body.write(b'--' + opts['boundary'] + b'\r\n')
        new_body.write(headers.replace(b'\n', b'\r\n'))
        new_body.write(b'\r\n\r\n')
        new_body.write(content)
        new_body.write(b'\r\n')
    new_body.write(b'--' + opts['boundary'] + b'--\r\n')
    request._body = new_body.getvalue()
    request.META['CONTENT_LENGTH'] = len(request._body)
    # Clear out _files and _post so that the request object re-parses the body
    if hasattr(request, '_files'):
        delattr(request, '_files')
    if hasattr(request, '_post'):
        delattr(request, '_post')
|
def cluster_nodes(self):
    """Each node in a Redis Cluster has its view of the current cluster
    configuration, given by the set of known nodes, the state of the
    connection we have with such nodes, their flags, properties and
    assigned slots, and so forth.

    ``CLUSTER NODES`` provides all this information, that is, the current
    cluster configuration of the node we are contacting, in a serialization
    format which happens to be exactly the same as the one used by Redis
    Cluster itself in order to store on disk the cluster state (however the
    on disk cluster state has a few additional info appended at the end).

    Note that normally clients willing to fetch the map between Cluster
    hash slots and node addresses should use ``CLUSTER SLOTS`` instead.
    ``CLUSTER NODES``, that provides more information, should be used for
    administrative tasks, debugging, and configuration inspections.  It is
    also used by ``redis-trib`` in order to manage a cluster.

    .. versionadded:: 0.7.0

    :rtype: list(:class:`~tredis.cluster.ClusterNode`)
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    def format_response(result):
        # Each non-empty line of CLUSTER NODES output describes one node as
        # space-separated fields; field 9 onward lists the served slots.
        values = []
        for row in result.decode('utf-8').split('\n'):
            if not row:
                continue
            parts = row.split(' ')
            slots = []
            for slot in parts[8:]:
                if '-' in slot:
                    # A slot range "start-end" becomes an inclusive tuple.
                    sparts = slot.split('-')
                    slots.append((int(sparts[0]), int(sparts[1])))
                else:
                    # A single slot is normalized to a (slot, slot) pair.
                    slots.append((int(slot), int(slot)))
            # parts[1] is the "host:port" connection string.
            ip_port = common.split_connection_host_port(parts[1])
            values.append(ClusterNode(parts[0], ip_port[0], ip_port[1], parts[2], parts[3], int(parts[4]), int(parts[5]), int(parts[6]), parts[7], slots))
        return values
    return self._execute(['CLUSTER', 'NODES'], format_callback=format_response)
|
def language_name(self, language=DEFAULT_LANGUAGE, min_score: int = 75) -> str:
    """Return the name of this language (just the language part of the
    tag, not the entire tag) expressed in a natural language.

    The target language can be given as a string or another Language
    object.  By default, things are named in English:

    >>> Language.get('fr').language_name()
    'French'
    >>> Language.get('el').language_name()
    'Greek'

    But you can ask for language names in numerous other languages:

    >>> Language.get('fr').language_name('fr')
    'français'
    >>> Language.get('sl').language_name('sk')
    'slovinčina'
    """
    name_kind = 'language'
    return self._get_name(name_kind, language, min_score)
|
def get_definitions(self, project, name=None, repository_id=None, repository_type=None, query_order=None, top=None, continuation_token=None, min_metrics_time=None, definition_ids=None, path=None, built_after=None, not_built_after=None, include_all_properties=None, include_latest_builds=None, task_id_filter=None, process_type=None, yaml_filename=None):
    """GetDefinitions.

    Gets a list of definitions.

    :param str project: Project ID or project name
    :param str name: If specified, filters to definitions whose names match this pattern.
    :param str repository_id: A repository ID. If specified, filters to definitions that use this repository.
    :param str repository_type: If specified, filters to definitions that have a repository of this type.
    :param str query_order: Indicates the order in which definitions should be returned.
    :param int top: The maximum number of definitions to return.
    :param str continuation_token: A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions.
    :param datetime min_metrics_time: If specified, indicates the date from which metrics should be included.
    :param [int] definition_ids: A list of IDs of definitions to retrieve (sent comma-delimited).
    :param str path: If specified, filters to definitions under this folder.
    :param datetime built_after: If specified, filters to definitions that have builds after this date.
    :param datetime not_built_after: If specified, filters to definitions that do not have builds after this date.
    :param bool include_all_properties: Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned.
    :param bool include_latest_builds: Indicates whether to return the latest and latest completed builds for this definition.
    :param str task_id_filter: If specified, filters to definitions that use the specified task.
    :param int process_type: If specified, filters to definitions with the given process type.
    :param str yaml_filename: If specified, filters to YAML definitions that match the given filename.
    :rtype: [BuildDefinitionReference]
    """
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    # definition_ids travels over the wire as a comma-delimited string.
    if definition_ids is not None:
        definition_ids = ",".join(map(str, definition_ids))
    # (python name, wire name, value, serialization type) — order matters
    # only for readability; each optional filter is added when supplied.
    param_specs = [
        ('name', 'name', name, 'str'),
        ('repository_id', 'repositoryId', repository_id, 'str'),
        ('repository_type', 'repositoryType', repository_type, 'str'),
        ('query_order', 'queryOrder', query_order, 'str'),
        ('top', '$top', top, 'int'),
        ('continuation_token', 'continuationToken', continuation_token, 'str'),
        ('min_metrics_time', 'minMetricsTime', min_metrics_time, 'iso-8601'),
        ('definition_ids', 'definitionIds', definition_ids, 'str'),
        ('path', 'path', path, 'str'),
        ('built_after', 'builtAfter', built_after, 'iso-8601'),
        ('not_built_after', 'notBuiltAfter', not_built_after, 'iso-8601'),
        ('include_all_properties', 'includeAllProperties', include_all_properties, 'bool'),
        ('include_latest_builds', 'includeLatestBuilds', include_latest_builds, 'bool'),
        ('task_id_filter', 'taskIdFilter', task_id_filter, 'str'),
        ('process_type', 'processType', process_type, 'int'),
        ('yaml_filename', 'yamlFilename', yaml_filename, 'str'),
    ]
    query_parameters = {}
    for py_name, wire_name, value, ser_type in param_specs:
        if value is not None:
            query_parameters[wire_name] = self._serialize.query(py_name, value, ser_type)
    response = self._send(http_method='GET',
                          location_id='dbeaf647-6167-421a-bda9-c9327b25e2e6',
                          version='5.0',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[BuildDefinitionReference]', self._unwrap_collection(response))
|
def parse_preview(self, raw_content):
    """Parse the preview part of the content.

    Returns a tuple of (parsed string, has_more).  ``has_more`` is False
    when the preview equals the whole content — including when no
    "read more" separator is configured — and True otherwise.

    :param raw_content: raw content
    :return: tuple(parsed string, whether there is more content or not)
    """
    if self._read_more_exp is None:
        return self.parse_whole(raw_content), False
    pieces = self._read_more_exp.split(raw_content, maxsplit=1)
    has_more = len(pieces) == 2 and bool(pieces[0])
    preview = pieces[0].rstrip() if has_more else raw_content
    # the preview part contains no read_more separator, so we can safely
    # reuse the parse_whole method
    return self.parse_whole(preview), has_more
|
def quote(query):
    '''Wrap query in single-quote (') characters.

    A quote is added only on the side(s) that lack one, so an
    already-quoted string is returned unchanged.
    '''
    # `startswith(...) is not True` was a roundabout spelling of `not ...`;
    # str.startswith/endswith always return a bool, so this is equivalent.
    if not query.startswith('\''):
        query = '\'' + query
    if not query.endswith('\''):
        query = query + '\''
    return query
|
def make_spatialmap_source(name, Spatial_Filename, spectrum):
    """Construct and return a `fermipy.roi_model.Source` backed by a
    spatial-map file.

    Any spectrum dict supplied is merged into the source definition.
    """
    data = {
        'Spatial_Filename': Spatial_Filename,
        'ra': 0.0,
        'dec': 0.0,
        'SpatialType': 'SpatialMap',
        'Source_Name': name,
    }
    if spectrum is not None:
        data.update(spectrum)
    return roi_model.Source(name, data)
|
def get_bool_resources(self, package_name, locale='\x00\x00'):
    """Return the XML (as UTF-8 encoded bytes) of all resources of type
    'bool'.

    Read more about bool resources:
    https://developer.android.com/guide/topics/resources/more-resources.html#Bool

    :param package_name: the package name to get the resources for
    :param locale: the locale to get the resources for (default: '\\x00\\x00')
    """
    self._analyse()
    chunks = ['<?xml version="1.0" encoding="utf-8"?>\n', '<resources>\n']
    try:
        entries = self.values[package_name][locale]["bool"]
    except KeyError:
        # Unknown package/locale or no bool resources: emit an empty list.
        entries = []
    for entry in entries:
        chunks.append('<bool name="{}">{}</bool>\n'.format(entry[0], entry[1]))
    chunks.append('</resources>\n')
    return ''.join(chunks).encode('utf-8')
|
def update(cls, card_id, card_generated_cvc2_id, type_=None, custom_headers=None):
    """Update a generated CVC2 code for a card.

    :type user_id: int
    :type card_id: int
    :type card_generated_cvc2_id: int
    :param type_: The type of generated cvc2. Can be STATIC or GENERATED.
    :type type_: str
    :type custom_headers: dict[str, str]|None
    :rtype: BunqResponseInt
    """
    if custom_headers is None:
        custom_headers = {}
    api_client = client.ApiClient(cls._get_api_context())
    # Serialize the single updatable field, then strip the request wrapper
    # field the API does not expect.
    request_map = {cls.FIELD_TYPE: type_}
    request_map_string = converter.class_to_json(request_map)
    request_map_string = cls._remove_field_for_request(request_map_string)
    request_bytes = request_map_string.encode()
    # This endpoint requires an encrypted request body; encryption also
    # adds the needed headers to custom_headers.
    request_bytes = security.encrypt(cls._get_api_context(), request_bytes, custom_headers)
    endpoint_url = cls._ENDPOINT_URL_UPDATE.format(cls._determine_user_id(), card_id, card_generated_cvc2_id)
    response_raw = api_client.put(endpoint_url, request_bytes, custom_headers)
    return BunqResponseInt.cast_from_bunq_response(cls._process_for_id(response_raw))
|
def read_file_1st_col_only(fname):
    """Read a CSV file (ref_classes.csv) and return the list of names
    taken from the first column, header row excluded.
    """
    with open(fname, 'r') as fin:
        fin.readline()  # discard the header row
        return [row.split(',')[0] for row in fin]
|
def set_min_requests_per_connection(self, host_distance, min_requests):
    """Sets a threshold for concurrent requests per connection, below which
    connections will be considered for disposal (down to core connections;
    see :meth:`~Cluster.set_core_connections_per_host`).

    Pertains to connection pool management in protocol versions {1,2}.

    :raises UnsupportedOperation: when using protocol version 3 or higher
    :raises ValueError: if ``min_requests`` is outside 0-126 or is not
        below the configured max for this ``host_distance``
    """
    if self.protocol_version >= 3:
        raise UnsupportedOperation(
            "Cluster.set_min_requests_per_connection() only has an effect "
            "when using protocol_version 1 or 2.")
    if min_requests < 0 or min_requests > 126 or min_requests >= self._max_requests_per_connection[host_distance]:
        # Bug fix: the message describes the *max* threshold, but the old
        # code interpolated the current min value; report the max instead.
        raise ValueError(
            "min_requests must be 0-126 and less than the max_requests for this host_distance (%d)"
            % (self._max_requests_per_connection[host_distance],))
    self._min_requests_per_connection[host_distance] = min_requests
|
def on_message(self, unused_channel, basic_deliver, properties, body):
    """Handle a message delivered by RabbitMQ.

    Non-empty bodies are queued on ``self.messages`` while a websocket
    (``self.ws``) is attached; the delivery is always acknowledged.

    :param pika.channel.Channel unused_channel: The channel object
    :param pika.Spec.Basic.Deliver basic_deliver: basic_deliver method
    :param pika.Spec.BasicProperties properties: properties
    :param str|unicode body: The message body
    """
    tag = basic_deliver.delivery_tag
    _logger.debug('Received message # %s from %s: %s', tag, properties.app_id, body)
    if body and self.ws:
        self.messages.append(body)
        _logger.debug('out ws : %s', len(self.ws))
        _logger.debug('out messages : %s', len(self.messages))
    self.acknowledge_message(tag)
|
def visit_comprehension(self, node):
    """Render an astroid.Comprehension node as a string."""
    # Render the filter clauses first (same evaluation order as before),
    # then stitch the clause together.
    conditions = "".join(" if %s" % cond.accept(self) for cond in node.ifs)
    return "for %s in %s%s" % (node.target.accept(self), node.iter.accept(self), conditions)
|
def validateDocumentFinal(self, doc):
    """Run the final document-validation step once all incremental
    validation steps have completed.

    Performs the checks described by the XML Rec: all IDREF/IDREFS
    attribute definitions are checked for validity.
    """
    # Unwrap the underlying C object, tolerating a missing document.
    doc__o = None if doc is None else doc._o
    return libxml2mod.xmlValidateDocumentFinal(self._o, doc__o)
|
def show_print_dialog(self):
    """Open the print dialog.

    If no impact function is cached yet, attempt to rebuild one from the
    active layer's output-metadata keywords before showing the dialog;
    without a valid impact function, a critical message bar is shown
    instead.
    """
    if not self.impact_function:
        # Now try to read the keywords and show them in the dock
        try:
            active_layer = self.iface.activeLayer()
            keywords = self.keyword_io.read_keywords(active_layer)
            provenances = keywords.get('provenance_data', {})
            extra_keywords = keywords.get('extra_keywords', {})
            # Multi-exposure analyses are flagged in the extra keywords and
            # need a different impact-function loader.
            is_multi_exposure = (extra_keywords.get(extra_keyword_analysis_type['key']) == (MULTI_EXPOSURE_ANALYSIS_FLAG))
            if provenances and is_multi_exposure:
                self.impact_function = (MultiExposureImpactFunction.load_from_output_metadata(keywords))
            else:
                self.impact_function = (ImpactFunction.load_from_output_metadata(keywords))
        except (KeywordNotFoundError, HashNotFoundError, InvalidParameterError, NoKeywordsFoundError, MetadataReadError,
                # AttributeError This is hiding some real error. ET
                ) as e:
            # Added this check in 3.2 for #1861
            active_layer = self.iface.activeLayer()
            LOGGER.debug(e)
            if active_layer is None:
                if self.conflicting_plugin_detected:
                    send_static_message(self, conflicting_plugin_message())
                else:
                    send_static_message(self, getting_started_message())
            else:
                show_no_keywords_message(self)
        except Exception as e:  # pylint: disable=broad-except
            error_message = get_error_message(e)
            send_error_message(self, error_message)
    if self.impact_function:
        dialog = PrintReportDialog(self.impact_function, self.iface, dock=self, parent=self)
        dialog.show()
    else:
        display_critical_message_bar("InaSAFE", self.tr('Please select a valid layer before printing. ' 'No Impact Function found.'), iface_object=self)
|
def add_edge(self, fr, to):
    """Add an outward edge to a vertex.

    :param fr: The source vertex.
    :param to: The name of the outward edge.
    :raises ValueError: if either endpoint is not a known vertex
    """
    # ToDo: find out why item can be in set but not dict
    known = set(self.graph.vs)
    for vertex in (fr, to):
        if vertex not in known:
            raise ValueError('can not connect unknown vertices in n-partite graphs, {!r} missing'.format(vertex))
    self.graph.add_edge(fr, to)
|
def read_int64(self, little_endian=True):
    """Read 8 bytes from the stream as a signed 64-bit integer.

    Args:
        little_endian (bool): specify the endianness. (Default) Little endian.

    Returns:
        int:
    """
    endian = "<" if little_endian else ">"
    return self.unpack(endian + "q", 8)
|
def CountHuntFlows(self, hunt_id, filter_condition=db.HuntFlowsCondition.UNSET, cursor=None):
    """Counts hunt flows matching given conditions.

    Args:
      hunt_id: The hunt whose flows should be counted.
      filter_condition: A db.HuntFlowsCondition value restricting which
        flows are counted.
      cursor: MySQL cursor (supplied by the connection decorator —
        presumably; confirm against the class's other DB methods).

    Returns:
      The number of matching flows.
    """
    hunt_id_int = db_utils.HuntIDToInt(hunt_id)
    # `parent_flow_id IS NULL` restricts the count to top-level flows of
    # the hunt; {filter_condition} is filled in below.
    query = ("SELECT COUNT(*) FROM flows " "FORCE INDEX(flows_by_hunt) " "WHERE parent_hunt_id = %s AND parent_flow_id IS NULL " "{filter_condition}")
    filter_query, extra_args = self._HuntFlowCondition(filter_condition)
    args = [hunt_id_int] + extra_args
    query = query.format(filter_condition=filter_query)
    cursor.execute(query, args)
    return cursor.fetchone()[0]
|
def get(name: str, required: bool = False, default: Union[Type[empty], T] = empty, type: Type[T] = None) -> T:
    """Generic getter for environment variables.

    Dispatches to the typed ``env_*`` helper matching ``type`` (given
    either as a type object or its string name); unknown or missing types
    fall back to the string helper.

    :param name: The name of the environment variable to be pulled
    :type name: str
    :param required: Whether the environment variable is required. If ``True``
        and the variable is not present, a ``KeyError`` is raised.
    :type required: bool
    :param default: The value to return if the environment variable is not
        present. (Providing a default alongside setting ``required=True`` will
        raise a ``ValueError``)
    :param type: The type of variable expected, as a type or its name.
    """
    dispatch = {
        'int': env_int,
        int: env_int,
        # 'float': env_float,
        # float: env_float,
        'bool': env_bool,
        bool: env_bool,
        'string': env_string,
        str: env_string,
        'list': env_list,
        list: env_list,
    }  # type: Dict[Union[str, Type[Any]], Callable[..., Any]]
    handler = dispatch.get(type, env_string)
    return handler(name, default=default, required=required)
|
def rebase_opt(self):
    """Determine which cleancss rebase option to use, caching the result.

    cleancss 3.x needs an explicit ``--root``; other major versions take
    no extra flag.
    """
    if not hasattr(self, '_rebase_opt'):
        # Version output looks like b"MAJOR.MINOR.REVISION",
        # e.g. b"3.4.19" or b"4.0.0"; only the major component matters.
        out, _ = Popen(['cleancss', '--version'], stdout=PIPE).communicate()
        major = int(out[:out.index(b'.')])
        if major == 3:
            self._rebase_opt = ['--root', self.root]
        else:
            self._rebase_opt = []
    return self._rebase_opt
|
def channels_voice_phone_number_show(self, id, **kwargs):
    """Show a single voice phone number.

    https://developer.zendesk.com/rest_api/docs/voice-api/phone_numbers#show-phone-number
    """
    endpoint = "/api/v2/channels/voice/phone_numbers/{id}.json".format(id=id)
    return self.call(endpoint, **kwargs)
|
def _checkFileExists(self):
    """Verify that the underlying file exists.

    Returns True when the file exists; on a missing file, records an
    IOError via ``setException`` and returns False.  When
    ``self._fileName`` is None, nothing is checked and True is returned.
    """
    if not self._fileName or os.path.exists(self._fileName):
        return True
    msg = "File not found: {}".format(self._fileName)
    logger.error(msg)
    self.setException(IOError(msg))
    return False
|
def build_gui(self, container):
    """Build GUI such that image list area is maximized.

    Lays out, top to bottom: a channel-selection row, an output
    path/suffix row, the image tree view (which gets all the stretch),
    a status line, and the Save/Close/Help button row.
    """
    vbox, sw, orientation = Widgets.get_oriented_box(container)
    # Channel selector plus "modified only" filter checkbox.
    captions = (('Channel:', 'label', 'Channel Name', 'combobox', 'Modified only', 'checkbutton'),)
    w, b = Widgets.build_info(captions, orientation=orientation)
    self.w.update(b)
    b.channel_name.set_tooltip('Channel for locating images to save')
    b.channel_name.add_callback('activated', self.select_channel_cb)
    mod_only = self.settings.get('modified_only', True)
    b.modified_only.set_state(mod_only)
    b.modified_only.add_callback('activated', lambda *args: self.redo())
    b.modified_only.set_tooltip("Show only locally modified images")
    container.add_widget(w, stretch=0)
    # Output directory (with Browse button) and filename suffix controls.
    captions = (('Path:', 'llabel', 'OutDir', 'entry', 'Browse', 'button'), ('Suffix:', 'llabel', 'Suffix', 'entry'))
    w, b = Widgets.build_info(captions, orientation=orientation)
    self.w.update(b)
    b.outdir.set_text(self.outdir)
    b.outdir.set_tooltip('Output directory')
    b.outdir.add_callback('activated', lambda w: self.set_outdir())
    b.browse.set_tooltip('Browse for output directory')
    b.browse.add_callback('activated', lambda w: self.browse_outdir())
    b.suffix.set_text(self.suffix)
    b.suffix.set_tooltip('Suffix to append to filename')
    b.suffix.add_callback('activated', lambda w: self.set_suffix())
    container.add_widget(w, stretch=0)
    # Image list; stretch=1 so it absorbs all spare vertical space.
    self.treeview = Widgets.TreeView(auto_expand=True, sortable=True, selection='multiple', use_alt_row_color=True)
    self.treeview.setup_table(self.columns, 1, 'IMAGE')
    self.treeview.add_callback('selected', self.toggle_save_cb)
    container.add_widget(self.treeview, stretch=1)
    # One-line status display.
    captions = (('Status', 'llabel'),)
    w, b = Widgets.build_info(captions, orientation=orientation)
    self.w.update(b)
    b.status.set_text('')
    b.status.set_tooltip('Status message')
    container.add_widget(w, stretch=0)
    # Button row; Save starts disabled until image(s) are selected.
    btns = Widgets.HBox()
    btns.set_border_width(4)
    btns.set_spacing(3)
    btn = Widgets.Button('Save')
    btn.set_tooltip('Save selected image(s)')
    btn.add_callback('activated', lambda w: self.save_images())
    btn.set_enabled(False)
    btns.add_widget(btn, stretch=0)
    self.w.save = btn
    btn = Widgets.Button('Close')
    btn.add_callback('activated', lambda w: self.close())
    btns.add_widget(btn, stretch=0)
    btn = Widgets.Button("Help")
    btn.add_callback('activated', lambda w: self.help())
    btns.add_widget(btn, stretch=0)
    btns.add_widget(Widgets.Label(''), stretch=1)
    container.add_widget(btns, stretch=0)
    self.gui_up = True
    # Initialize directory selection dialog
    self.dirsel = DirectorySelection(self.fv.w.root.get_widget())
    # Generate initial listing
    self.update_channels()
|
def map_compute_fov(m: tcod.map.Map, x: int, y: int, radius: int = 0, light_walls: bool = True, algo: int = FOV_RESTRICTIVE,) -> None:
    """Compute the field-of-view for a map instance.

    .. deprecated:: 4.5
        Use :any:`tcod.map.Map.compute_fov` instead.

    :param m: the map whose FOV is computed (modified in place)
    :param x: point-of-view x coordinate
    :param y: point-of-view y coordinate
    :param radius: view radius passed through to ``compute_fov``
    :param light_walls: whether walls are lit, passed through
    :param algo: FOV algorithm constant (default FOV_RESTRICTIVE)
    """
    # Thin deprecated shim: delegate directly to the Map method.
    m.compute_fov(x, y, radius, light_walls, algo)
|
def project_events(self, initial_state, domain_events):
    """Evolve ``initial_state`` by folding the sequence of domain events
    through the mutator function (``self._mutator_func`` when set,
    otherwise ``self.mutate``).
    """
    mutate = self._mutator_func or self.mutate
    state = initial_state
    for event in domain_events:
        state = mutate(state, event)
    return state
|
def _set(self, obj, value):
    """Internal state setter, invoked by :meth:`StateTransition.__call__`.

    Validates ``value`` against the legal enum before writing through the
    class-level descriptor for the tracked property.
    """
    if value not in self.lenum:
        raise ValueError("Not a valid value: %s" % value)
    descriptor = type(obj).__dict__[self.propname]
    descriptor.__set__(obj, value)
|
def delete_example(self, example_id, url='https://api.shanbay.com/bdc/example/{example_id}/'):
    """Delete an example sentence and return the decoded JSON response."""
    return self._request(url.format(example_id=example_id), method='delete').json()
|
def derivative(self, point):
    r"""Derivative of this operator in ``point``.

    ``NormOperator().derivative(y)(x) == (y / y.norm()).inner(x)``

    This is only applicable in inner product spaces; in Hilbert spaces
    the derivative is

    .. math::
        (D \|\cdot\|)(y)(x) = \langle y / \|y\|, x \rangle

    Parameters
    ----------
    point : `domain` `element-like`
        Point in which to take the derivative.

    Returns
    -------
    derivative : `InnerProductOperator`

    Raises
    ------
    ValueError
        If ``point.norm() == 0``, in which case the derivative is not
        well defined in the Frechet sense.

    Examples
    --------
    >>> r3 = odl.rn(3)
    >>> op = NormOperator(r3)
    >>> derivative = op.derivative([1, 0, 0])
    >>> derivative([1, 0, 0])
    1.0
    """
    point = self.domain.element(point)
    point_norm = point.norm()
    if point_norm == 0:
        raise ValueError('not differentiable in 0')
    return InnerProductOperator(point / point_norm)
|
def sendNotification(snmpEngine, authData, transportTarget, contextData, notifyType, *varBinds, **options):
    """Send an SNMP TRAP or INFORM notification (:RFC:`1905#section-4.2.6`).

    Returns a :py:class:`asyncio.Future` which gets done whenever the
    INFORM response arrives or an error occurs; TRAPs are unconfirmed,
    so for them the future is resolved immediately with an empty result.

    Parameters
    ----------
    snmpEngine: :py:class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.
    authData: :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.
    transportTarget: :py:class:`~pysnmp.hlapi.asyncio.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncio.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer address.
    contextData: :py:class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName values.
    notifyType: str
        Indicates type of notification to be sent. Recognized literal
        values are *trap* or *inform*.
    \\*varBinds:
        One or more OID-value pairs, :py:class:`~pysnmp.smi.rfc1902.ObjectType`
        or :py:class:`~pysnmp.smi.rfc1902.NotificationType` instances to place
        into the SNMP notification.  Notification PDUs place a rigid
        requirement on the variable-binding ordering: SNMPv2-MIB::sysUpTime.0
        first, then SNMPv2-SMI::snmpTrapOID.0.

    Other Parameters
    ----------------
    \\*\\*options:
        Request options:

        * `lookupMib` - load MIB and resolve response MIB variables at
          the cost of slightly reduced performance. Default is `True`.

    Yields
    ------
    errorIndication : str
        True value indicates SNMP engine error.
    errorStatus : str
        True value indicates SNMP PDU error.
    errorIndex : int
        Non-zero value refers to `varBinds[errorIndex-1]`
    varBinds : tuple
        A sequence of :py:class:`~pysnmp.smi.rfc1902.ObjectType` class
        instances representing MIB variables returned in SNMP response.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.
    """
    def __cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx):
        # INFORM completion callback: translate the engine callback into
        # resolution of the caller's future.
        lookupMib, future = cbCtx
        if future.cancelled():
            return
        try:
            varBindsUnmade = VB_PROCESSOR.unmakeVarBinds(snmpEngine.cache, varBinds, lookupMib)
        except Exception as e:
            future.set_exception(e)
        else:
            future.set_result((errorIndication, errorStatus, errorIndex, varBindsUnmade))
    notifyName = LCD.configure(snmpEngine, authData, transportTarget, notifyType, contextData.contextName)
    future = asyncio.Future()
    ntforg.NotificationOriginator().sendVarBinds(snmpEngine, notifyName, contextData.contextEngineId, contextData.contextName, VB_PROCESSOR.makeVarBinds(snmpEngine.cache, varBinds), __cbFun, (options.get('lookupMib', True), future))
    if notifyType == 'trap':
        # TRAPs are unconfirmed: no response will ever arrive, so resolve
        # the future on the next event-loop iteration with a success tuple.
        def __trapFun(future):
            if future.cancelled():
                return
            future.set_result((None, 0, 0, []))
        loop = asyncio.get_event_loop()
        loop.call_soon(__trapFun, future)
    return future
|
def _Pluralize ( value , unused_context , args ) :
"""Formatter to pluralize words ."""
|
if len ( args ) == 0 :
s , p = '' , 's'
elif len ( args ) == 1 :
s , p = '' , args [ 0 ]
elif len ( args ) == 2 :
s , p = args
else : # Should have been checked at compile time
raise AssertionError
if value > 1 :
return p
else :
return s
|
def get_items(self):
    """Return items (excluding top level items)"""
    def _descendants(node):
        # Depth-first, pre-order: each child precedes its own children.
        found = []
        for index in range(node.childCount()):
            child = node.child(index)
            found.append(child)
            found.extend(_descendants(child))
        return found

    items = []
    for top_item in self.get_top_level_items():
        items.extend(_descendants(top_item))
    return items
|
def from_yaml(self, path):
    """Register bundles from a YAML configuration file"""
    # Load every bundle declared in the file and register each by name.
    for name, bundle in YAMLLoader(path).load_bundles().items():
        self.register(name, bundle)
|
def read_very_lazy(self):
    """Return any data available in the cooked queue (very lazy).

    Raise EOFError if connection closed and no data available.
    Return '' if no cooked data available otherwise. Don't block.
    """
    data = self.cookedq.getvalue()
    # Reset the queue so subsequent reads only see newly arrived data.
    self.cookedq.seek(0)
    self.cookedq.truncate()
    if self.eof and not data and not self.rawq:
        raise EOFError('telnet connection closed')
    return data
|
def reassign_comment_to_book(self, comment_id, from_book_id, to_book_id):
    """Moves a ``Comment`` from one ``Book`` to another.

    Mappings to other ``Books`` are unaffected.

    arg:    comment_id (osid.id.Id): the ``Id`` of the ``Comment``
    arg:    from_book_id (osid.id.Id): the ``Id`` of the current ``Book``
    arg:    to_book_id (osid.id.Id): the ``Id`` of the destination ``Book``
    raise:  NotFound - ``comment_id, from_book_id,`` or ``to_book_id``
            not found or ``comment`` not mapped to ``from_book_id``
    raise:  NullArgument - ``comment_id, from_book_id,`` or
            ``to_book_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin
    self.assign_comment_to_book(comment_id, to_book_id)
    try:
        self.unassign_comment_from_book(comment_id, from_book_id)
    except BaseException:
        # FIX: explicit ``except BaseException`` instead of a bare ``except:``
        # (same semantics, satisfies E722). Roll back the new assignment so
        # the comment is not left mapped to both books, then re-raise.
        self.unassign_comment_from_book(comment_id, to_book_id)
        raise
|
def _are_coordinates_valid(self):
    """Check if the coordinates are valid.

    :return: True if coordinates are valid otherwise False.
    :rtype: bool
    """
    try:
        # Building the two corner points raises ValueError on bad values.
        QgsPointXY(self.x_minimum.value(), self.y_maximum.value())
        QgsPointXY(self.x_maximum.value(), self.y_minimum.value())
    except ValueError:
        return False
    else:
        return True
|
def write_temporary_file(content, prefix='', suffix=''):
    """Generate a temporary file with the given content.

    Args:
        content (str): file content (usually a script, Dockerfile, playbook
            or config file)
        prefix (str): the filename starts with this prefix (default: no prefix)
        suffix (str): the filename ends with this suffix (default: no suffix)

    Returns:
        str: name of the temporary file

    Note:
        You are responsible for the deletion of the file.
    """
    temp = tempfile.NamedTemporaryFile(prefix=prefix, suffix=suffix,
                                       mode='w+t', delete=False)
    try:
        # FIX: write() emits the whole string in one call; the previous
        # writelines(content) iterated the string character by character.
        temp.write(content)
    finally:
        # Always release the handle, even if the write fails.
        temp.close()
    return temp.name
|
def posterior(self, x, s=1.):
    """Model is X_1, ..., X_n ~ N(theta, s^2), theta ~ self, s fixed"""
    prior_prec = 1. / self.sigma ** 2
    data_prec = x.size / s ** 2
    # Posterior precision is the sum of prior and data precisions.
    post_var = 1. / (prior_prec + data_prec)
    post_mean = post_var * (prior_prec * self.mu + data_prec * x.mean())
    return TruncNormal(mu=post_mean, sigma=np.sqrt(post_var),
                       a=self.a, b=self.b)
|
def send(self, value):
    """Send text to stdin. Can only be used on non blocking commands

    Args:
        value (str): the text to write on stdin

    Raises:
        TypeError: If command is blocking

    Returns:
        ShellCommand: return this ShellCommand instance for chaining
    """
    # Guard clause: blocking commands have no open stdin to write to.
    if self.block or self._stdin is None:
        raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
    self.writer.write("{}\n".format(value))
    return self
|
def mutate_label(label):
    """BigQuery field_name should start with a letter or underscore and contain
    only alphanumeric characters. Labels that start with a number are prefixed
    with an underscore. Any unsupported characters are replaced with
    underscores and an md5 hash is added to the end of the label to avoid
    possible collisions.

    :param str label: the original label which might include unsupported characters
    :return: String that is supported by the database
    """
    # Replace each run of unsupported characters with a single underscore.
    mutated = re.sub(r'[^\w]+', '_', label)
    # A leading digit is not allowed; shift it behind an underscore.
    if re.match(r'^\d', label):
        mutated = '_' + mutated
    if mutated != label:
        # Distinct originals could collapse to the same sanitized name, so
        # append an md5 of the original label to disambiguate.
        mutated += '_' + hashlib.md5(label.encode('utf-8')).hexdigest()
    return mutated
|
def setRedYellowGreenState(self, tlsID, state):
    """setRedYellowGreenState(string, string) -> None

    Sets the named tl's state as a tuple of light definitions from
    rugGyYuoO, for red, red-yellow, green, yellow, off, where lower case
    letters mean that the stream has to decelerate.

    :param tlsID: id of the traffic light to modify
    :param state: new state string, one character per controlled link
    """
    # Forward the request to the TraCI connection as a string-valued
    # SET_TL_VARIABLE command; no reply payload is expected.
    self._connection._sendStringCmd(tc.CMD_SET_TL_VARIABLE, tc.TL_RED_YELLOW_GREEN_STATE, tlsID, state)
|
def get_Object_or_None(klass, *args, **kwargs):
    """Uses get() to return an object, or None if the object does not exist.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), an MultipleObjectsReturned will be raised
    if more than one object is found.

    Ex: get_Object_or_None(User, db, id=1)
    """
    queryset = _get_queryset(klass)
    try:
        if args:
            # First positional argument selects the database alias to query.
            return queryset.using(args[0]).get(**kwargs)
        return queryset.get(*args, **kwargs)
    except queryset.model.DoesNotExist:
        return None
|
def _server_property(self, attr_name):
    """An attribute of the current server's description.

    If the client is not connected, this will block until a connection is
    established or raise ServerSelectionTimeoutError if no server is
    available.

    Not threadsafe if used multiple times in a single method, since
    the server may change. In such cases, store a local reference to a
    ServerDescription first, then use its properties.
    """
    selected = self._topology.select_server(writable_server_selector)
    return getattr(selected.description, attr_name)
|
def mpim_close(self, *, channel: str, **kwargs) -> SlackResponse:
    """Closes a multiparty direct message channel.

    Args:
        channel (str): Multiparty Direct message channel to close. e.g. 'G1234567890'
    """
    # Merge the required channel id into the remaining request arguments.
    kwargs["channel"] = channel
    return self.api_call("mpim.close", json=kwargs)
|
def plot_diagrams(results, configs, compiler, out_dir):
    """Plot all diagrams specified by the configs.

    :param results: measurement results to plot
    :param configs: iterable of diagram configuration dicts, each with at
        least a 'name' and a 'desc' entry
    :param compiler: display name of the compiler used for the measurements
    :param out_dir: directory receiving the generated .qbk files; images go
        into its 'images' subdirectory
    """
    compiler_fn = make_filename(compiler)
    total = psutil.virtual_memory().total
    # pylint: disable=I0011,E1101
    memory = int(math.ceil(byte_to_gb(total)))
    images_dir = os.path.join(out_dir, 'images')
    for config in configs:
        out_prefix = '{0}_{1}'.format(config['name'], compiler_fn)
        plot_diagram(config, results, images_dir,
                     os.path.join(images_dir, '{0}.png'.format(out_prefix)))
        # FIX: qbk_content is a str, so the file must be opened in text mode
        # ('w'); writing a str to a 'wb' handle raises TypeError on Python 3.
        with open(os.path.join(out_dir, '{0}.qbk'.format(out_prefix)), 'w') as out_f:
            qbk_content = """{0}
Measured on a {2} host with {3} GB memory. Compiler used: {4}.
[$images/metaparse/{1}.png [width 100%]]
""".format(config['desc'], out_prefix, platform.platform(), memory, compiler)
            out_f.write(qbk_content)
|
def info(text, *args, **kwargs):
    '''Display informations'''
    message = text.format(*args, **kwargs)
    # Prefix the message with a purple '>>>' marker and flush immediately
    # so output appears even when stdout is block-buffered.
    print('{0} {1}'.format(purple('>>>'), message))
    sys.stdout.flush()
|
def last_available_business_date(self, asset_manager_id, asset_ids, page_no=None, page_size=None):
    """Returns the last available business date for the assets so we know the
    starting date for new data which needs to be downloaded from data
    providers.

    This method can only be invoked by system user.
    """
    self.logger.info('Retrieving last available business dates for assets')
    url = '%s/last-available-business-date' % self.endpoint
    params = {
        'asset_manager_ids': [asset_manager_id],
        'asset_ids': ','.join(asset_ids),
    }
    # Pagination parameters are only sent when explicitly provided.
    if page_no:
        params['page_no'] = page_no
    if page_size:
        params['page_size'] = page_size
    response = self.session.get(url, params=params)
    if response.ok:
        self.logger.info("Received %s assets' last available business date", len(response.json()))
        return response.json()
    self.logger.error(response.text)
    response.raise_for_status()
|
def Lehrer(m, Dtank, Djacket, H, Dinlet, rho, Cp, k, mu, muw=None, isobaric_expansion=None, dT=None, inlettype='tangential', inletlocation='auto'):
    r'''Calculates average heat transfer coefficient for a jacket around a
    vessel according to [1]_ as described in [2]_.

    .. math::
        Nu_{S,L} = \left[\frac{0.03 Re_S^{0.75} Pr}
        {1 + \frac{1.74(Pr-1)}{Re_S^{0.125}}}\right]
        \left(\frac{\mu}{\mu_w}\right)^{0.14}

        d_g = \left(\frac{8}{3}\right)^{0.5}\delta

        v_h = (v_S v_{inlet})^{0.5} + v_A

        v_{inlet} = \frac{Q}{\frac{\pi}{4} d_{inlet}^2}

        v_s = \frac{Q}{\frac{\pi}{4}(D_{jacket}^2 - D_{tank}^2)}

    For radial inlets:

    .. math::
        v_A = 0.5(2 g H \beta \Delta T)^{0.5}

    For tangential inlets:

    .. math::
        v_A = 0

    Parameters
    ----------
    m : float
        Mass flow rate of fluid, [kg/s]
    Dtank : float
        Outer diameter of tank or vessel surrounded by jacket, [m]
    Djacket : float
        Inner diameter of jacket surrounding a vessel or tank, [m]
    H : float
        Height of the vessel or tank, [m]
    Dinlet : float
        Inner diameter of inlet into the jacket, [m]
    rho : float
        Density of the fluid at Tm [kg/m^3]
    Cp : float
        Heat capacity of fluid at Tm [J/kg/K]
    k : float
        Thermal conductivity of fluid at Tm [W/m/K]
    mu : float
        Viscosity of fluid at Tm [Pa*s]
    muw : float, optional
        Viscosity of fluid at Tw [Pa*s]
    isobaric_expansion : float, optional
        Constant pressure expansivity of the fluid, [1/K]
    dT : float, optional
        Temperature difference of fluid in jacket, [K]
    inlettype : str, optional
        Either 'tangential' or 'radial'
    inletlocation : str, optional
        Either 'top' or 'bottom' or 'auto'

    Returns
    -------
    h : float
        Average heat transfer coefficient inside the jacket [W/m^2/K]

    Notes
    -----
    If the fluid is heated and enters from the bottom, natural convection
    assists the heat transfer and the Grashof term is added; if it were to
    enter from the top, it would be subtracted. The situation is reversed
    if entry is from the top.

    Examples
    --------
    Example as in [2]_, matches completely.

    >>> Lehrer(m=2.5, Dtank=0.6, Djacket=0.65, H=0.6, Dinlet=0.025, dT=20.,
    ... rho=995.7, Cp=4178.1, k=0.615, mu=798E-6, muw=355E-6)
    2922.128124761829

    Examples similar to in [2]_ but covering the other case:

    >>> Lehrer(m=2.5, Dtank=0.6, Djacket=0.65, H=0.6, Dinlet=0.025, dT=20.,
    ... rho=995.7, Cp=4178.1, k=0.615, mu=798E-6, muw=355E-6,
    ... inlettype='radial', isobaric_expansion=0.000303)
    3269.4389632666557

    References
    ----------
    .. [1] Lehrer, Isaac H. "Jacket-Side Nusselt Number." Industrial &
       Engineering Chemistry Process Design and Development 9, no. 4
       (October 1, 1970): 553-58. doi:10.1021/i260036a010.
    .. [2] Gesellschaft, V. D. I., ed. VDI Heat Atlas. 2nd edition.
       Berlin; New York: Springer, 2010.
    '''
    # Annular gap width and volumetric flow rate.
    delta = (Djacket - Dtank)/2.
    Q = m/rho
    Pr = Cp*mu/k
    # Axial (slot) velocity and inlet velocity.
    vs = Q/H/delta
    vo = Q/(pi/4*Dinlet**2)
    # The buoyancy contribution v_A applies only to radial inlets with a
    # known temperature difference and expansivity.
    if dT and isobaric_expansion and inlettype == 'radial' and inletlocation:
        if dT > 0: # Heating jacket fluid
            if inletlocation == 'auto' or inletlocation == 'bottom':
                va = 0.5*(2*g*H*isobaric_expansion*abs(dT))**0.5
            else:
                va = -0.5*(2*g*H*isobaric_expansion*abs(dT))**0.5
        else: # cooling fluid
            if inletlocation == 'auto' or inletlocation == 'top':
                va = 0.5*(2*g*H*isobaric_expansion*abs(dT))**0.5
            else:
                va = -0.5*(2*g*H*isobaric_expansion*abs(dT))**0.5
    else:
        va = 0
    # Characteristic velocity and equivalent gap diameter.
    vh = (vs*vo)**0.5 + va
    dg = (8/3.)**0.5*delta
    Res = vh*dg*rho/mu
    # Viscosity-correction term is applied only when the wall viscosity is known.
    if muw:
        NuSL = (0.03*Res**0.75*Pr)/(1 + 1.74*(Pr - 1)/Res**0.125)*(mu/muw)**0.14
    else:
        NuSL = (0.03*Res**0.75*Pr)/(1 + 1.74*(Pr - 1)/Res**0.125)
    return NuSL*k/dg
|
def handle_transport_fail(self, exception=None, **kwargs):
    """Failure handler called by the transport on send failure"""
    # Log the failure (with a traceback unless the exception opts out via
    # a falsy 'print_trace' attribute) and flag the client state as failed.
    logger.error(
        "Failed to submit message: %r",
        str(exception),
        exc_info=getattr(exception, "print_trace", True),
    )
    self.state.set_fail()
|
def load_retaildata():
    """Monthly retail trade data from census.gov.

    Downloads the advance monthly sales series for each retail category,
    parses them into one monthly DataFrame, and computes year-over-year
    percentage change.

    :return: tuple ``(sales, yoy)`` - monthly sales levels and their
        12-month percentage change.

    Note: performs network requests to census.gov on every call.
    """
    # full = 'https://www.census.gov/retail/mrts/www/mrtssales92-present.xls'
    # indiv = 'https://www.census.gov/retail/marts/www/timeseries.html'
    # Category name -> advance-report text file on census.gov.
    db = {"Auto, other Motor Vehicle": "https://www.census.gov/retail/marts/www/adv441x0.txt", "Building Material and Garden Equipment and Supplies Dealers": "https://www.census.gov/retail/marts/www/adv44400.txt", "Clothing and Clothing Accessories Stores": "https://www.census.gov/retail/marts/www/adv44800.txt", "Dept. Stores (ex. leased depts)": "https://www.census.gov/retail/marts/www/adv45210.txt", "Electronics and Appliance Stores": "https://www.census.gov/retail/marts/www/adv44300.txt", "Food Services and Drinking Places": "https://www.census.gov/retail/marts/www/adv72200.txt", "Food and Beverage Stores": "https://www.census.gov/retail/marts/www/adv44500.txt", "Furniture and Home Furnishings Stores": "https://www.census.gov/retail/marts/www/adv44200.txt", "Gasoline Stations": "https://www.census.gov/retail/marts/www/adv44700.txt", "General Merchandise Stores": "https://www.census.gov/retail/marts/www/adv45200.txt", "Grocery Stores": "https://www.census.gov/retail/marts/www/adv44510.txt", "Health and Personal Care Stores": "https://www.census.gov/retail/marts/www/adv44600.txt", "Miscellaneous Store Retailers": "https://www.census.gov/retail/marts/www/adv45300.txt", "Motor Vehicle and Parts Dealers": "https://www.census.gov/retail/marts/www/adv44100.txt", "Nonstore Retailers": "https://www.census.gov/retail/marts/www/adv45400.txt", "Retail and Food Services, total": "https://www.census.gov/retail/marts/www/adv44x72.txt", "Retail, total": "https://www.census.gov/retail/marts/www/adv44000.txt", "Sporting Goods, Hobby, Book, and Music Stores": "https://www.census.gov/retail/marts/www/adv45100.txt", "Total (excl. Motor Vehicle)": "https://www.census.gov/retail/marts/www/adv44y72.txt", "Retail (excl. Motor Vehicle and Parts Dealers)": "https://www.census.gov/retail/marts/www/adv4400a.txt", }
    dct = {}
    for key, value in db.items():
        data = pd.read_csv(value, skiprows=5, skip_blank_lines=True, header=None, sep="\s+", index_col=0, )
        # The numeric table ends where the "SEASONAL"/"NO" footer begins;
        # truncate everything from that marker row onward.
        try:
            cut = data.index.get_loc("SEASONAL")
        except KeyError:
            cut = data.index.get_loc("NO")
        data = data.iloc[:cut]
        data = data.apply(lambda col: pd.to_numeric(col, downcast="float"))
        # Reshape the year-by-month table into a single (year, month) series.
        data = data.stack()
        year = data.index.get_level_values(0)
        month = data.index.get_level_values(1)
        # Index each observation by its month-end date.
        idx = pd.to_datetime({"year": year, "month": month, "day": 1}) + offsets.MonthEnd(1)
        data.index = idx
        data.name = key
        dct[key] = data
    sales = pd.DataFrame(dct)
    # Align all categories on a complete month-end calendar.
    sales = sales.reindex(pd.date_range(sales.index[0], sales.index[-1], freq="M"))
    # TODO: account for any skipped months; could specify a DateOffset to
    # `freq` param of `pandas.DataFrame.shift`
    yoy = sales.pct_change(periods=12)
    return sales, yoy
|
def _edges2conns ( G , edge_data = False ) :
"""Create a mapping from graph edges to agent connections to be created .
: param G :
NetworkX ' s Graph or DiGraph which has : attr : ` addr ` attribute for each
node .
: param bool edge _ data :
If ` ` True ` ` , stores also edge data to the returned dictionary .
: returns :
A dictionary where keys are agent addresses and values are lists of
addresses to which key - agent should create connections in order to
recreate the graph structure in an agent society .
: rtype : dict"""
|
cm = { }
for n in G . nodes ( data = True ) :
if edge_data :
cm [ n [ 1 ] [ 'addr' ] ] = [ ( G . node [ nb ] [ 'addr' ] , G [ n [ 0 ] ] [ nb ] ) for nb in G [ n [ 0 ] ] ]
else :
cm [ n [ 1 ] [ 'addr' ] ] = [ ( G . node [ nb ] [ 'addr' ] , { } ) for nb in G [ n [ 0 ] ] ]
return cm
|
def validate_regexp(pattern, flags=0):
    """Validate the field matches the given regular expression.

    Should work with anything that supports the ``str()`` conversion.

    :param pattern: Regular expression to match. String or compiled regular
        expression instance.
    :param flags: Flags for the regular expression (ignored when ``pattern``
        is already compiled).
    :raises: ``ValidationError('regexp')``
    """
    # Compile once up front; a pre-compiled pattern is used as-is.
    regex = re.compile(pattern, flags) if isinstance(pattern, str) else pattern
    def regexp_validator(field, data):
        # A missing value (None) passes validation; use a separate
        # required-validator to reject absent fields.
        if field.value is None:
            return
        if regex.match(str(field.value)) is None:
            raise ValidationError('regexp', pattern=pattern)
    return regexp_validator
|
def _pack ( self , content ) :
"""pack the content using serializer and compressor"""
|
if self . serializer :
content = self . serializer . serialize ( content )
if self . compressor :
content = self . compressor . compress ( content )
return content
|
def _ellipse_phantom_2d(space, ellipses):
    """Create a phantom of ellipses in 2d space.

    Parameters
    ----------
    space : `DiscreteLp`
        Uniformly discretized space in which the phantom should be generated.
        If ``space.shape`` is 1 in an axis, a corresponding slice of the
        phantom is created (instead of squashing the whole phantom into the
        slice).
    ellipses : list of lists
        Each row should contain the entries ::

            'value',
            'axis_1', 'axis_2',
            'center_x', 'center_y',
            'rotation'

        The provided ellipses need to be specified relative to the
        reference rectangle ``[-1, -1] x [1, 1]``. Angles are to be given
        in radians.

    Returns
    -------
    phantom : ``space`` element
        2D ellipse phantom in ``space``.

    See Also
    --------
    shepp_logan : The typical use-case for this function.
    """
    # Blank image
    p = np.zeros(space.shape, dtype=space.dtype)
    minp = space.grid.min_pt
    maxp = space.grid.max_pt
    # Create the pixel grid
    grid_in = space.grid.meshgrid
    # move points to [-1, 1]
    grid = []
    for i in range(2):
        mean_i = (minp[i] + maxp[i]) / 2.0
        # Where space.shape = 1, we have minp = maxp, so we set diff_i = 1
        # to avoid division by zero. Effectively, this allows constructing
        # a slice of a 2D phantom.
        diff_i = (maxp[i] - minp[i]) / 2.0 or 1.0
        grid.append((grid_in[i] - mean_i) / diff_i)
    for ellip in ellipses:
        assert len(ellip) == 6
        intensity = ellip[0]
        a_squared = ellip[1] ** 2
        b_squared = ellip[2] ** 2
        x0 = ellip[3]
        y0 = ellip[4]
        theta = ellip[5]
        scales = [1 / a_squared, 1 / b_squared]
        # Center mapped from [-1, 1] to relative [0, 1] coordinates.
        center = (np.array([x0, y0]) + 1.0) / 2.0
        # Create the offset x, y and z values for the grid
        if theta != 0:
            # Rotate the points to the expected coordinate system.
            ctheta = np.cos(theta)
            stheta = np.sin(theta)
            mat = np.array([[ctheta, stheta], [-stheta, ctheta]])
            # Calculate the points that could possibly be inside the volume
            # Since the points are rotated, we cannot do anything directional
            # without more logic
            max_radius = np.sqrt(np.abs(mat).dot([a_squared, b_squared]))
            # NOTE(review): _getshapes_2d is defined elsewhere; it appears to
            # return index/slice objects bounding the ellipse - confirm.
            idx, shapes = _getshapes_2d(center, max_radius, space.shape)
            subgrid = [g[idi] for g, idi in zip(grid, shapes)]
            offset_points = [vec * (xi - x0i)[..., None] for xi, vec, x0i in zip(subgrid, mat.T, [x0, y0])]
            rotated = offset_points[0] + offset_points[1]
            np.square(rotated, out=rotated)
            radius = np.dot(rotated, scales)
        else:
            # Calculate the points that could possibly be inside the volume
            max_radius = np.sqrt([a_squared, b_squared])
            idx, shapes = _getshapes_2d(center, max_radius, space.shape)
            subgrid = [g[idi] for g, idi in zip(grid, shapes)]
            squared_dist = [ai * (xi - x0i) ** 2 for xi, ai, x0i in zip(subgrid, scales, [x0, y0])]
            # Parentheses to get best order for broadcasting
            radius = squared_dist[0] + squared_dist[1]
        # Find the points within the ellipse
        inside = radius <= 1
        # Add the ellipse intensity to those points
        p[idx][inside] += intensity
    return space.element(p)
|
def stem(self, word):
    """Perform stemming on an input word.

    Returns the ASCII-folded stem when a stemmer is configured, otherwise
    the word unchanged.
    """
    # NOTE(review): the guard reads ``self.stemmer`` but the call uses
    # ``self._stemmer`` - confirm both attributes exist and stay in sync.
    if self.stemmer:
        return unicode_to_ascii(self._stemmer.stem(word))
    else:
        return word
|
def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None, write=True, write_extension_data=False):
    """Add files from the working tree, specific blobs or BaseIndexEntries
    to the index.

    :param items:
        Multiple types of items are supported, types can be mixed within one call.
        Different types imply a different handling. File paths may generally be
        relative or absolute.

        - path string
            strings denote a relative or absolute path into the repository pointing to
            an existing file, i.e. CHANGES, lib/myfile.ext, '/home/gitrepo/lib/myfile.ext'.
            Absolute paths must start with working tree directory of this index's repository
            to be considered valid. For example, if it was initialized with a non-normalized path, like
            `/root/repo/../repo`, absolute paths to be added must start with `/root/repo/../repo`.
            Paths provided like this must exist. When added, they will be written
            into the object database.
            PathStrings may contain globs, such as 'lib/__init__*' or can be directories
            like 'lib', the latter ones will add all the files within the directory and
            subdirectories.
            This equals a straight git-add.
            They are added at stage 0

        - Blob or Submodule object
            Blobs are added as they are assuming a valid mode is set.
            The file they refer to may or may not exist in the file system, but
            must be a path relative to our repository.
            If their sha is null (40*0), their path must exist in the file system
            relative to the git repository as an object will be created from
            the data at the path.
            The handling now very much equals the way string paths are processed, except that
            the mode you have set will be kept. This allows you to create symlinks
            by settings the mode respectively and writing the target of the symlink
            directly into the file. This equals a default Linux-Symlink which
            is not dereferenced automatically, except that it can be created on
            filesystems not supporting it as well.
            Please note that globs or directories are not allowed in Blob objects.
            They are added at stage 0

        - BaseIndexEntry or type
            Handling equals the one of Blob objects, but the stage may be
            explicitly set. Please note that Index Entries require binary sha's.

    :param force:
        **CURRENTLY INEFFECTIVE**
        If True, otherwise ignored or excluded files will be
        added anyway.
        As opposed to the git-add command, we enable this flag by default
        as the API user usually wants the item to be added even though
        they might be excluded.

    :param fprogress:
        Function with signature f(path, done=False, item=item) called for each
        path to be added, one time once it is about to be added where done==False
        and once after it was added where done=True.
        item is set to the actual item we handle, either a Path or a BaseIndexEntry
        Please note that the processed path is not guaranteed to be present
        in the index already as the index is currently being processed.

    :param path_rewriter:
        Function with signature (string) func(BaseIndexEntry) function returning a path
        for each passed entry which is the path to be actually recorded for the
        object created from entry.path. This allows you to write an index which
        is not identical to the layout of the actual files on your hard-disk.
        If not None and ``items`` contain plain paths, these paths will be
        converted to Entries beforehand and passed to the path_rewriter.
        Please note that entry.path is relative to the git repository.

    :param write:
        If True, the index will be written once it was altered. Otherwise
        the changes only exist in memory and are not available to git commands.

    :param write_extension_data:
        If True, extension data will be written back to the index. This can lead to issues in case
        it is containing the 'TREE' extension, which will cause the `git commit` command to write an
        old tree, instead of a new one representing the now changed index.
        This doesn't matter if you use `IndexFile.commit()`, which ignores the `TREE` extension altogether.
        You should set it to True if you intend to use `IndexFile.commit()` exclusively while maintaining
        support for third-party extensions. Besides that, you can usually safely ignore the built-in
        extensions when using GitPython on repositories that are not handled manually at all.
        All current built-in extensions are listed here:
        http://opensource.apple.com/source/Git/Git-26/src/git-htmldocs/technical/index-format.txt

    :return:
        List(BaseIndexEntries) representing the entries just actually added.

    :raise OSError:
        if a supplied Path did not exist. Please note that BaseIndexEntry
        Objects that do not have a null sha will be added even if their paths
        do not exist.
    """
    # sort the entries into strings and Entries, Blobs are converted to entries
    # automatically
    # paths can be git-added, for everything else we use git-update-index
    paths, entries = self._preprocess_add_items(items)
    entries_added = []
    # This code needs a working tree, therefore we try not to run it unless required.
    # That way, we are OK on a bare repository as well.
    # If there are no paths, the rewriter has nothing to do either
    if paths:
        entries_added.extend(self._entries_for_paths(paths, path_rewriter, fprogress, entries))
    # HANDLE ENTRIES
    if entries:
        null_mode_entries = [e for e in entries if e.mode == 0]
        if null_mode_entries:
            raise ValueError("At least one Entry has a null-mode - please use index.remove to remove files for clarity")
        # END null mode should be remove
        # HANDLE ENTRY OBJECT CREATION
        # create objects if required, otherwise go with the existing shas
        null_entries_indices = [i for i, e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA]
        if null_entries_indices:
            @git_working_dir
            def handle_null_entries(self):
                for ei in null_entries_indices:
                    null_entry = entries[ei]
                    new_entry = self._store_path(null_entry.path, fprogress)
                    # update null entry
                    entries[ei] = BaseIndexEntry((null_entry.mode, new_entry.binsha, null_entry.stage, null_entry.path))
                # END for each entry index
            # end closure
            handle_null_entries(self)
        # END null_entry handling
        # REWRITE PATHS
        # If we have to rewrite the entries, do so now, after we have generated
        # all object sha's
        if path_rewriter:
            for i, e in enumerate(entries):
                entries[i] = BaseIndexEntry((e.mode, e.binsha, e.stage, path_rewriter(e)))
            # END for each entry
        # END handle path rewriting
        # just go through the remaining entries and provide progress info
        for i, entry in enumerate(entries):
            progress_sent = i in null_entries_indices
            if not progress_sent:
                fprogress(entry.path, False, entry)
                fprogress(entry.path, True, entry)
            # END handle progress
        # END for each entry
        entries_added.extend(entries)
    # END if there are base entries
    # FINALIZE
    # add the new entries to this instance
    for entry in entries_added:
        self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)
    if write:
        self.write(ignore_extension_data=not write_extension_data)
    # END handle write
    return entries_added
|
def Mult(self, x, factor):
    """Scales the freq/prob associated with the value x.

    Args:
        x: number value
        factor: how much to multiply by
    """
    # Unseen values are treated as having frequency 0.
    current = self.d.get(x, 0)
    self.d[x] = current * factor
|
def get_session(self):
    """Create Session to store credentials.

    Returns
        (Session)
        A Session object with OAuth 2.0 credentials.
    """
    # Exchange the client credentials for an access token, then wrap the
    # resulting credential in a Session.
    token_response = request_access_token(
        grant_type=auth.CLIENT_CREDENTIAL_GRANT,
        client_id=self.client_id,
        client_secret=self.client_secret,
        scopes=self.scopes,
    )
    credential = OAuth2Credential.make_from_response(
        response=token_response,
        grant_type=auth.CLIENT_CREDENTIAL_GRANT,
        client_id=self.client_id,
        client_secret=self.client_secret,
    )
    return Session(oauth2credential=credential)
|
def _validate(self):
    '''Internal function to validate the transformer before applying all internal transformers.'''
    # A fitted transformer always has feature labels.
    if self.f_labels is None:
        raise NotFittedError('FeatureRepMix')
    if not self.transformers:
        return
    names, transformers, _ = zip(*self.transformers)
    # validate names
    self._validate_names(names)
    # validate transformers - fail on the first non-FeatureRep instance
    offender = next((t for t in transformers if not isinstance(t, FeatureRep)), None)
    if offender is not None:
        raise TypeError("All transformers must be an instance of FeatureRep. '%s' (type %s) doesn't." % (offender, type(offender)))
|
def smooth_shakemap(shakemap_layer_path, output_file_path='', active_band=1, smoothing_method=NUMPY_SMOOTHING, smoothing_sigma=0.9):
    """Make a smoother shakemap layer from a shake map.

    :param shakemap_layer_path: The shake map raster layer path.
    :type shakemap_layer_path: basestring

    :param output_file_path: Path for the smoothed GeoTIFF; a unique
        temporary file is generated when empty.
    :type output_file_path: basestring

    :param active_band: The band which the data located, default to 1.
    :type active_band: int

    :param smoothing_method: The smoothing method that wanted to be used.
    :type smoothing_method: NONE_SMOOTHING, NUMPY_SMOOTHING, SCIPY_SMOOTHING

    :param smoothing_sigma: parameter for gaussian filter used in smoothing
        function.
    :type smoothing_sigma: float

    :returns: Path to the smoothed shakemap raster file.
    :rtype: basestring
    """
    # Set output path
    if not output_file_path:
        output_file_path = unique_filename(suffix='.tiff', dir=temp_dir())
    # convert to numpy
    shakemap_file = gdal.Open(shakemap_layer_path)
    shakemap_array = np.array(shakemap_file.GetRasterBand(active_band).ReadAsArray())
    # do smoothing
    # NOTE(review): only NUMPY_SMOOTHING actually smooths here; any other
    # method (including SCIPY_SMOOTHING) passes the data through unchanged.
    if smoothing_method == NUMPY_SMOOTHING:
        smoothed_array = convolve(shakemap_array, gaussian_kernel(smoothing_sigma))
    else:
        smoothed_array = shakemap_array
    # Create smoothed shakemap raster layer
    driver = gdal.GetDriverByName('GTiff')
    smoothed_shakemap_file = driver.Create(output_file_path, shakemap_file.RasterXSize, shakemap_file.RasterYSize, 1, gdal.GDT_Float32 # Important, since the default is integer
    )
    smoothed_shakemap_file.GetRasterBand(1).WriteArray(smoothed_array)
    # CRS: copy projection and geotransform from the source raster.
    smoothed_shakemap_file.SetProjection(shakemap_file.GetProjection())
    smoothed_shakemap_file.SetGeoTransform(shakemap_file.GetGeoTransform())
    smoothed_shakemap_file.FlushCache()
    # Deleting the dataset object closes the file handle in GDAL.
    del smoothed_shakemap_file
    if not os.path.isfile(output_file_path):
        raise FileNotFoundError(tr('The smoothed shakemap is not created. It should be at ' '{output_file_path}'.format(output_file_path=output_file_path)))
    return output_file_path
|
def _resolve_placeholders(self):
    """Resolve objects that have been imported from elsewhere."""
    # Index every known module by name, pairing each module record with a
    # name -> child lookup table so placeholder resolution is O(1) per name.
    modules = {
        module["name"]: (
            module,
            {child["name"]: child for child in module["children"]},
        )
        for module in self.paths.values()
    }
    resolved = set()
    # Resolve each module in turn; the visit path guards against cycles.
    for module_name in modules:
        _resolve_module_placeholders(
            modules, module_name, collections.OrderedDict(), resolved)
|
def json_serial(obj):
    """Custom JSON serializer for objects not serializable by default."""
    # Only date/datetime values get special handling; anything else is a
    # genuine serialization error.
    if not isinstance(obj, (datetime.datetime, datetime.date)):
        raise TypeError('Type {} not serializable.'.format(type(obj)))
    return obj.isoformat()
|
def conv2d(self, filter_size, output_channels, stride=1, padding='SAME', stoch=None, bn=True, test=False, activation_fn=tf.nn.relu, b_value=0.0, s_value=1.0):
    """2D Convolutional Layer.

    Builder-style method: convolves ``self.input`` and writes the result
    back into ``self.input``. Nothing is returned.

    :param filter_size: int. Assumes a square filter. A value of 0 collapses
        the spatial dims to a 1x1 feature map (used for FCNs).
    :param output_channels: int
    :param stride: int
    :param padding: 'VALID' or 'SAME'
    :param stoch: None, 'N' (normal) or 'B' (Bernoulli) stochastic feature
        maps. NOTE(review): the 'B' path looks broken — see comments below.
    :param bn: bool. Apply batch normalization when True.
    :param test: bool. Unused in this body — presumably a train/test switch
        for batch norm; confirm against callers.
    :param activation_fn: tf.nn function
    :param b_value: float. Per-channel bias added after batch norm; skipped
        when None.
    :param s_value: float. Per-channel scale; skipped when None.
    """
    # Each conv layer gets a unique, numbered variable scope.
    self.count['conv'] += 1
    scope = 'conv_' + str(self.count['conv'])
    with tf.variable_scope(scope):
        # Conv function
        input_channels = self.input.get_shape()[3]
        if filter_size == 0:  # outputs a 1x1 feature map; used for FCN
            filter_size = self.input.get_shape()[2]
            padding = 'VALID'
        output_shape = [filter_size, filter_size, input_channels, output_channels]
        w = self.weight_variable(name='weights', shape=output_shape)
        self.input = tf.nn.conv2d(self.input, w, strides=[1, stride, stride, 1], padding=padding)
        # Additional functions
        if stoch is not None:  # Draw feature map values from a normal distribution
            if stoch == 'N':  # Normal
                # Separate 3x3 convs predict per-pixel mean and std, then a
                # reparameterised sample z = mean + eps * std is drawn.
                output_shape = [3, 3, output_channels, 1]
                w2 = self.weight_variable(name='weights_mean', shape=output_shape)
                mean = tf.nn.conv2d(self.input, w2, strides=[1, 1, 1, 1], padding=padding)
                w3 = self.weight_variable(name='weights_std', shape=output_shape)
                std = tf.nn.conv2d(self.input, w3, strides=[1, 1, 1, 1], padding=padding)
                # NOTE: tf.pack is pre-1.0 TensorFlow (renamed tf.stack).
                # map_size is only [H, W]; presumably broadcasting handles
                # the batch/channel dims — confirm on the target TF version.
                map_size = tf.pack([mean.get_shape()[1], mean.get_shape()[2]])
                z = mean + tf.random_normal(map_size) * std
            if stoch == 'B':  # Bernoulli
                # NOTE(review): this branch looks unfinished — `mean` is
                # rebound to the int 0, so mean.get_shape() on the next
                # line raises AttributeError; also no `z` is produced here,
                # so the refinement below raises NameError for stoch='B'.
                mean = 0
                map_size = tf.pack([mean.get_shape()[1], mean.get_shape()[2]])
            with tf.variable_scope("stoch"):
                # Learned per-pixel refinement weight applied to the
                # stochastic sample z (defined only in the 'N' branch).
                output_shape = tf.pack([self.input.get_shape()[1], self.input.get_shape()[2], 1, 1])
                w3 = self.weight_variable(name='weights_refinement', shape=output_shape)
                self.input = self.input + z * w3
        if bn is True:  # batch normalization
            self.input = self.batch_norm(self.input)
        if b_value is not None:  # bias value
            b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
            self.input = tf.add(self.input, b)
        if s_value is not None:  # scale value
            s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
            self.input = tf.multiply(self.input, s)
        if activation_fn is not None:  # activation function
            self.input = activation_fn(self.input)
        self.print_log(scope + ' output: ' + str(self.input.get_shape()))
|
def writefile(filename, data, binary=False):
    """Write the provided data to the file.

    `filename`
        Filename to write.
    `data`
        Data buffer to write.
    `binary`
        Set to ``True`` to indicate a binary file.

    Returns boolean.
    """
    mode = 'wb' if binary else 'w'
    try:
        # The context manager closes the handle even if write fails.
        with open(filename, mode) as handle:
            handle.write(data)
            handle.flush()
    except (OSError, IOError):
        # Any filesystem error (missing dir, permissions, ...) -> failure.
        return False
    return True
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.