signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def update(self, lease_time=None):
    """Refresh this task's expiration time.

    Tries to set the task's expiration time to the current time plus
    ``lease_time`` seconds.  Requires the job to not already be
    complete.  If ``lease_time`` is negative, the job immediately
    becomes available for other workers to run.

    :param int lease_time: time to extend job lease beyond now
    :raises rejester.exceptions.LostLease: if the lease has already
        expired
    """
    if lease_time is None:
        lease_time = self.default_lifetime
    with self.registry.lock(identifier=self.worker_id) as session:
        self._refresh(session)
        try:
            self.expires = time.time() + lease_time
            session.update(
                WORK_UNITS_ + self.work_spec_name,
                {self.key: self.data},
                priorities={self.key: self.expires},
                locks={self.key: self.worker_id},
            )
        # BUG FIX: the original used the Python 2-only
        # "except EnvironmentError, exc" syntax, which is a SyntaxError
        # on Python 3; behavior is otherwise unchanged.
        except EnvironmentError as exc:
            raise LostLease(exc)
def refs(self):
    """Iterate over downloadable sources -- references and templates.

    NOTE(review): the filter keeps sources whose ``is_downloadable`` is
    false, which reads as the opposite of this summary -- confirm the
    intended semantics against callers.
    """
    def bind(source):
        # Attach this bundle to the source before handing it out.
        source._bundle = self
        return source

    return [bind(source)
            for source in self.dataset.sources
            if not source.is_downloadable]
def ftpparse(line):
    """Parse one line of FTP LIST output into a dict of attributes:

    name    -- name of file (string) or None
    trycwd  -- False if CWD is definitely pointless, True otherwise
    tryretr -- False if RETR is definitely pointless, True otherwise

    Returns None when the line carries no file information.
    """
    # An empty name in EPLF, with no info, could be as short as 2 chars.
    if len(line) < 2:
        return None
    entry = {'name': None, 'trycwd': False, 'tryretr': False}

    # EPLF format -- http://pobox.com/~djb/proto/eplf.html
    #   "+i8388621.29609,m824255902,/,\tdev"
    #   "+i8388621.44468,m839956783,r,s10376,\tRFCEPLF"
    if line[0] == '+':
        if '\t' in line:
            flags, filename = line.split('\t', 1)
            entry['name'] = filename
            flag_list = flags.split(',')
            entry['trycwd'] = '/' in flag_list
            entry['tryretr'] = 'r' in flag_list
        return entry

    # UNIX-style listing, without inum and without blocks, e.g.
    #   "-rw-r--r--   1 root   other  531 Jan 29 03:26 README"
    #   "dr-xr-xr-x   2 root          512 Apr  8  1994 etc"
    # Variants of this shape are also produced by Microsoft's FTP
    # servers for Windows, WFTPD for MSDOS, NetWare, and NetPresenz
    # for the Mac (which may use the literal word "folder").
    if line[0] in 'bcdlps-':
        if line[0] == 'd':
            entry['trycwd'] = True
        if line[0] == '-':
            entry['tryretr'] = True
        if line[0] == 'l':
            entry['trycwd'] = entry['tryretr'] = True
        fields = line.split()
        if len(fields) < 7:
            return None
        del fields[0]              # permissions
        if fields[0] != 'folder':
            del fields[0]          # nlink
        del fields[0]              # uid
        del fields[0]              # gid or size
        if not ismonth(fields[0]):
            del fields[0]          # size
        if not ismonth(fields[0]):
            return None
        del fields[0]              # month
        del fields[0]              # day
        if not fields:
            return None
        del fields[0]              # year or time
        filename = " ".join(fields)
        # Resolve symlinks: keep only the link target.
        if line[0] == 'l' and ' -> ' in filename:
            filename = filename.split(' -> ', 1)[1]
        # Eliminate extra NetWare spaces.
        # NOTE(review): upstream ftpparse checks for three leading
        # spaces before trimming three characters -- confirm.
        if line[1] in ' [' and filename.startswith(' '):
            filename = filename[3:]
        entry['name'] = filename
        return entry

    # MultiNet / VMS (some spaces removed from examples):
    #   "00README.TXT;1   2 30-DEC-1996 17:44 [SYSTEM] (RWED,RWED,RE,RE)"
    semi = line.find(';')
    if semi != -1:
        filename = line[:semi]
        if filename.endswith(".DIR"):
            filename = filename[:-4]
            entry["trycwd"] = True
        else:
            entry["tryretr"] = True
        entry["name"] = filename
        return entry

    # MSDOS format:
    #   "04-27-00  09:09PM       <DIR>          licensed"
    #   "04-14-00  03:47PM                  589 readme.htm"
    if line[0].isdigit():
        fields = line.split()
        if len(fields) != 4:
            return None
        entry['name'] = fields[3]
        if fields[2][0] == '<':
            entry['trycwd'] = True
        else:
            entry['tryretr'] = True
        return entry

    # Some useless lines, safely ignored:
    #   "Total of 11 Files, 10966 Blocks." (VMS)
    #   "total 14786" (UNIX)
    #   "Directory DISK$PCSA:[ANONYM]" (VMS)
    return None
def _get_rules_from_aws(self):
    """Load the EC2 security rules off AWS into a list of dicts.

    Returns:
        list: one dict per security group, each with ``id``, ``name``
        and ``description``; when the group has any permissions it also
        carries a ``rules`` list of per-rule dicts tagged with a
        ``direction`` of ``"INGRESS"`` or ``"EGRESS"``.
    """
    list_of_rules = []
    if self.profile:
        boto3.setup_default_session(profile_name=self.profile)
    if self.region:
        ec2 = boto3.client('ec2', region_name=self.region)
    else:
        ec2 = boto3.client('ec2')
    security_groups = ec2.describe_security_groups(Filters=self.filters)
    for group in security_groups['SecurityGroups']:
        group_dict = {
            'id': group['GroupId'],
            'name': group['GroupName'],
            'description': group.get('Description', None),
        }
        if group.get('IpPermissions') or group.get('IpPermissionsEgress'):
            group_dict['rules'] = []
            # BUG FIX: the original iterated group.get(..., None), which
            # raises "TypeError: 'NoneType' object is not iterable" when
            # only one of the two permission lists is present; default to
            # an empty list instead.
            for rule in group.get('IpPermissions', []):
                rule_dict = self._build_rule(rule)
                rule_dict['direction'] = "INGRESS"
                group_dict['rules'].append(rule_dict)
            for rule in group.get('IpPermissionsEgress', []):
                rule_dict = self._build_rule(rule)
                rule_dict['direction'] = "EGRESS"
                group_dict['rules'].append(rule_dict)
        list_of_rules.append(group_dict)
    return list_of_rules
def get():
    """Get all nagios status information from a local nagios instance."""
    livestatus = mk_livestatus()
    return {
        'hosts': livestatus.get_hosts(),
        'services': livestatus.get_services(),
    }
def import_keys(self, key_data):
    """Import ``key_data`` into our keyring.

    :param key_data: exported key material (ASCII-armored or binary).
    :returns: the importer result object produced by the ``'import'``
        entry of ``self._result_map``.
    """
    # xxx there is no validation that key_data is actually a valid GPG
    # key; it might be possible to use --list-packets and parse output.
    log.info('Importing: %r', key_data[:256])
    outcome = self._result_map['import'](self)
    stream = _make_binary_stream(key_data, self._encoding)
    self._handle_io(['--import'], stream, outcome, binary=True)
    stream.close()
    return outcome
def addMibSource(self, *mibSources):
    """Add repository paths in which to search for PySNMP MIB files.

    Parameters
    ----------
    *mibSources:
        One or more filesystem paths to search, or Python package names
        to import and search for PySNMP MIB modules.

    Returns
    -------
    :py:class:`~pysnmp.smi.rfc1902.ObjectIdentity`
        Reference to itself, so calls can be chained.

    Notes
    -----
    Normally, ASN.1-to-Python MIB module conversion is performed
    automatically through PySNMP/PySMI interaction; MIB modules can
    also be compiled manually via the ``mibdump.py`` tool.
    """
    existing = self._mibSourcesToAdd
    self._mibSourcesToAdd = (
        mibSources if existing is None else existing + mibSources
    )
    return self
def change_view(self, change_in_depth):
    """Change the view depth by expanding or collapsing all same-level nodes."""
    depth = self.current_view_depth + change_in_depth
    # Depth never goes below zero.
    self.current_view_depth = depth if depth > 0 else 0
    # Collapse everything first, then re-expand down to the new depth.
    self.collapseAll()
    if self.current_view_depth > 0:
        for node in self.get_items(maxlevel=self.current_view_depth - 1):
            node.setExpanded(True)
def bootstrap(force=False):
    '''Download and install the latest version of the Chocolatey package
    manager via the official bootstrap.

    Chocolatey requires Windows PowerShell and the .NET v4.0 runtime.
    Depending on the host's version of Windows, chocolatey.bootstrap will
    attempt to ensure these prerequisites are met by downloading and
    executing the appropriate installers from Microsoft.

    Note that if PowerShell is installed, you may have to restart the host
    machine for Chocolatey to work.

    force
        Run the bootstrap process even if Chocolatey is found in the path.

    CLI Example:

    .. code-block:: bash

        salt '*' chocolatey.bootstrap
        salt '*' chocolatey.bootstrap force=True
    '''
    # Bail out early when Chocolatey is already on the path, unless forced.
    try:
        choc_path = _find_chocolatey(__context__, __salt__)
    except CommandExecutionError:
        choc_path = None
    if choc_path and not force:
        return 'Chocolatey found at {0}'.format(choc_path)

    # Lookup table mapping (osrelease, cpuarch) to the correct PowerShell
    # v2.0 installer -- there is a separate MSU per release/architecture.
    ps_downloads = {
        ('Vista', 'x86'): 'http://download.microsoft.com/download/A/7/5/A75BC017-63CE-47D6-8FA4-AFB5C21BAC54/Windows6.0-KB968930-x86.msu',
        ('Vista', 'AMD64'): 'http://download.microsoft.com/download/3/C/8/3C8CF51E-1D9D-4DAA-AAEA-5C48D1CD055C/Windows6.0-KB968930-x64.msu',
        ('2008Server', 'x86'): 'http://download.microsoft.com/download/F/9/E/F9EF6ACB-2BA8-4845-9C10-85FC4A69B207/Windows6.0-KB968930-x86.msu',
        ('2008Server', 'AMD64'): 'http://download.microsoft.com/download/2/8/6/28686477-3242-4E96-9009-30B16BED89AF/Windows6.0-KB968930-x64.msu',
    }
    # The .NET v4.0 web installer works under any supported Windows version.
    net4_url = 'http://download.microsoft.com/download/1/B/E/1BE39E79-7E39-46A3-96FF-047F95396215/dotNetFx40_Full_setup.exe'
    temp_dir = tempfile.gettempdir()

    # PowerShell ships with every Windows release following Server 2008;
    # when it is missing, install the matching v2.0 release from the table.
    ps_path = 'C:\\Windows\\SYSTEM32\\WindowsPowerShell\\v1.0\\powershell.exe'
    if not __salt__['cmd.has_exec'](ps_path):
        os_key = (__grains__['osrelease'], __grains__['cpuarch'])
        if os_key in ps_downloads:
            dest = os.path.join(temp_dir, 'powershell.exe')
            __salt__['cp.get_url'](ps_downloads[os_key], dest)
            result = __salt__['cmd.run_all']([dest, '/quiet', '/norestart'],
                                             python_shell=False)
            if result['retcode'] != 0:
                raise CommandExecutionError(
                    'Installing Windows PowerShell failed. Please run the '
                    'installer GUI on the host to get a more specific '
                    'reason.')
        else:
            raise CommandNotFoundError('Windows PowerShell not found')

    # Run the .NET Framework 4 web installer.
    dest = os.path.join(temp_dir, 'dotnet4.exe')
    __salt__['cp.get_url'](net4_url, dest)
    result = __salt__['cmd.run_all']([dest, '/q', '/norestart'],
                                     python_shell=False)
    if result['retcode'] != 0:
        raise CommandExecutionError(
            'Installing .NET v4.0 failed. Please run the installer GUI on '
            'the host to get a more specific reason.')

    # Finally, run the Chocolatey bootstrap script via PowerShell.
    cmd = ('{0} -NoProfile -ExecutionPolicy unrestricted '
           '-Command "iex ((new-object net.webclient).'
           'DownloadString(\'https://chocolatey.org/install.ps1\'))" '
           '&& SET PATH=%PATH%;%systemdrive%\\chocolatey\\bin'.format(ps_path))
    result = __salt__['cmd.run_all'](cmd, python_shell=True)
    if result['retcode'] != 0:
        raise CommandExecutionError(
            'Bootstrapping Chocolatey failed: {0}'.format(result['stderr']))
    return result['stdout']
def ampliconfile(self, sample, contig, amplicon_range, forward_primer, reverse_primer):
    """Extract the amplicon sequence from a contig and append it to the
    sample's amplicon FASTA file.

    :param sample: sample metadata object
    :param contig: name of the contig hit by the primers
    :param amplicon_range: range of the amplicon within the contig
    :param forward_primer: name of the forward primer
    :param reverse_primer: name of the reverse primer
    """
    with open(sample[self.analysistype].ampliconfile, 'a') as outfile:
        try:
            # Walk the assembly records looking for the contig of interest.
            for record in SeqIO.parse(sample[self.analysistype].assemblyfile, 'fasta'):
                if record.id != contig:
                    continue
                try:
                    # The gene name is the prefix of the forward primer name.
                    genename = forward_primer[0].split('-')[0]
                    try:
                        start = amplicon_range[0]
                        end = amplicon_range[1]
                        # Slice the amplicon out of the contig sequence;
                        # subtract one for zero-based indexing.
                        genesequence = str(record.seq)[int(start) - 1:int(end)]
                        # Rename the record with the sample name, contig,
                        # range, and primer names.
                        record.id = '{sn}_{contig}_{range}_{primers}'.format(
                            sn=sample.name,
                            contig=contig,
                            range='_'.join(str(x) for x in sorted(
                                sample[self.analysistype].range[record.id][genename])),
                            primers='_'.join(['_'.join(forward_primer),
                                              '_'.join(reverse_primer)]))
                        # Clear the description and attach the sliced sequence.
                        record.description = ''
                        record.seq = Seq.Seq(genesequence)
                        # Write the amplicon to file.
                        SeqIO.write(record, outfile, 'fasta')
                    except IndexError:
                        pass
                except AttributeError:
                    pass
        except FileNotFoundError:
            pass
def get_key_delivery_url(access_token, ck_id, key_type):
    '''Get Media Services Key Delivery URL.

    Args:
        access_token (str): A valid Azure authentication token.
        ck_id (str): A Media Service Content Key ID.
        key_type (str): A Media Service key Type.

    Returns:
        HTTP response. JSON body.
    '''
    full_path = "/ContentKeys('{0}')/GetKeyDeliveryUrl".format(ck_id)
    endpoint = ams_rest_endpoint + full_path
    body = '{"keyDeliveryType": "' + key_type + '"}'
    return do_ams_post(endpoint, full_path, body, access_token)
def factorial(n):
    """Factorial function that works with really big numbers."""
    # Accept floats that hold an exact integer value (e.g. 5.0).
    if isinstance(n, float):
        if n.is_integer():
            n = int(n)
    if not isinstance(n, INT_TYPES):
        raise TypeError("Non-integer input (perhaps you need Euler Gamma "
                        "function or Gauss Pi function)")
    if n < 0:
        raise ValueError("Input shouldn't be negative")
    # Multiply 2..n iteratively; Python ints grow without bound.
    product = 1
    factor = 2
    while factor <= n:
        product *= factor
        factor += 1
    return product
def run_miner_if_free(self):
    """Build the miner command for the configured device type and launch
    it in another thread when the device is idle.

    NOTE(review): the original docstring was a TODO stub -- confirm the
    intended contract.
    """
    (address, username, password, device, tstart, tend) = read_config()
    if self.dtype == 0:
        # CPU miner: pool address plus "user:password" credentials.
        self.run_miner_cmd = [
            cpu_miner_path,
            '-o', address,
            '-O', '{}:{}'.format(username, password),
        ]
    elif self.dtype == 1:
        # GPU (CUDA) miner: rebuild the address as
        # scheme://user[:password]@hostname:port.
        parsed = urlparse(address)
        url = '{}://{}:{}@{}'.format(parsed.scheme, username, password,
                                     parsed.netloc)
        self.run_miner_cmd = [gpu_miner_path, '-P', url, '-U']
    if len(self.run_miner_cmd) != 0:
        logger.info(' '.join(self.run_miner_cmd))
        # Start only if the resource (cpu or gpu) is free.
        if self.is_device_free():
            logger.info('start miner in another thread')
            self.run_cmd(self.run_miner_cmd)
def __check_response_for_fedex_error(self):
    """Check the response for general FedEx errors that aren't related
    to any one WSDL, raising FedexFailure on the first FAILURE notice."""
    if self.response.HighestSeverity != "FAILURE":
        return
    for note in self.response.Notifications:
        if note.Severity == "FAILURE":
            raise FedexFailure(note.Code, note.Message)
def __global_logging_exception_handler(exc_type, exc_value, exc_traceback):
    '''Log all un-handled python exceptions.'''
    if exc_type.__name__ == "KeyboardInterrupt":
        # Do not log the exception or display the traceback on Keyboard
        # Interrupt; just stop the logging queue listener thread if one
        # is running.
        if is_mp_logging_listener_configured():
            shutdown_multiprocessing_logging_listener()
    else:
        # Log the full formatted traceback for anything else.
        formatted = ''.join(
            traceback.format_exception(exc_type, exc_value, exc_traceback)
        ).strip()
        logging.getLogger(__name__).error(
            'An un-handled exception was caught by salt\'s global exception '
            'handler:\n%s: %s\n%s',
            exc_type.__name__, exc_value, formatted)
    # Always delegate to the original sys.excepthook afterwards.
    sys.__excepthook__(exc_type, exc_value, exc_traceback)
def set_seed(self, seed):
    """Override default values for the random initial topic assignment,
    setting them to ``seed`` instead.

    :param seed: 2-d integer array of shape (number of samples in the
        LDA model, number of tokens in the LDA model).
    :raises AssertionError: if the dtype or shape does not match.
    """
    # BUG FIX: ``np.int`` was removed in NumPy 1.24; check for any
    # integer dtype instead (this also accepts int32/int64 etc.).
    assert np.issubdtype(seed.dtype, np.integer) and \
        seed.shape == (self.samples, self.N)
    self.topic_seed = seed
def after_initial_csrf(self, response):
    """Called *only* if the crawler is started with an email and
    password combination.

    Logging in requires a CSRF token from a GET request.  This method
    takes the result of a GET request, extracts the CSRF token, and
    uses it to make a login request.  The response to that login
    request is handled by `after_initial_login`.
    """
    login_url = (
        URLObject("http://")
        .with_hostname(self.domain)
        .with_port(self.port)
        .with_path(LOGIN_API_PATH)
    )
    yield scrapy.FormRequest(
        login_url,
        formdata={"email": self.login_email,
                  "password": self.login_password},
        headers={b"X-CSRFToken": get_csrf_token(response)},
        callback=self.after_initial_login,
        errback=self.handle_error,
    )
def ExamineEvent(self, mediator, event):
    """Analyzes an event.

    Args:
        mediator (AnalysisMediator): mediates interactions between analysis
            plugins and other components, such as storage and dfvfs.
        event (EventObject): event to examine.
    """
    # Only filesystem stat events can carry Chrome extension paths.
    if event.data_type != 'fs:stat':
        return
    filename = getattr(event, 'filename', None)
    if not filename:
        return
    # Quick rejection of anything that is not Chrome-related.
    if 'chrome' not in filename.lower():
        return
    if not self._sep:
        self._sep = self._GetPathSegmentSeparator(filename)
    if '{0:s}Extensions{0:s}'.format(self._sep) not in filename:
        return
    # Only the extension folder itself is interesting: the path must end
    # in .../Extensions/<extension identifier>.
    path_segments = filename.split(self._sep)
    if path_segments[-2] != 'Extensions':
        return
    extension_identifier = path_segments[-1]
    if extension_identifier == 'Temp':
        return
    # Resolve the user owning the path; when that fails keep a (possibly
    # truncated) filename so the username can be deduced manually later.
    user = mediator.GetUsernameForPath(filename)
    if not user:
        if len(filename) > 25:
            user = 'Not found ({0:s}...)'.format(filename[0:25])
        else:
            user = 'Not found ({0:s})'.format(filename)
    title = self._GetTitleFromChromeWebStore(extension_identifier)
    if not title:
        title = extension_identifier
    # Record the (title, identifier) pair once per user.
    self._results.setdefault(user, [])
    if (title, extension_identifier) not in self._results[user]:
        self._results[user].append((title, extension_identifier))
def get_corpus(self):
    """Build and return the corpus (获取语料库).

    Scans outward from the initial corpus position in both directions,
    using per-unit CDM judgements to decide which units belong to the
    corpus.

    Return:
        corpus -- the corpus, as a str
    """
    # Forward judgement (正向判定): walk backwards from the initial
    # position, collecting units until the stop condition fires.
    corpus = []
    cd = 0
    tag = None
    for i in range(0, self.init_corpus[0][0]):
        init_unit = self.unit_raw[self.init_corpus[0][0] - i]
        cdm = CDM(init_unit)
        alpha = cdm.get_alpha()
        # BUG FIX: the original used "is not 0", an identity comparison
        # against an int literal (a SyntaxWarning since Python 3.8 and
        # implementation-dependent); use a value comparison instead.
        if cd <= self.cd_min and cdm.NC != 0:
            tag = True
        if cd > self.cd_max or cdm.NC == 0:
            tag = False
        if cd in range(self.cd_min + 1, self.cd_max) and cdm.NC != 0:
            if alpha > 0:
                tag = True
            else:
                tag = False
        if cdm.NC == 0:
            cd += 1
        else:
            cd = 0
        if tag == True:
            corpus.append(init_unit)
        elif tag == False:
            if alpha < 0 or cd > self.cd_max:
                break
            else:
                continue
    corpus = list(reversed(corpus))
    try:
        self.index = self.init_corpus[0][0] - i + 1
    except UnboundLocalError:
        # The loop never ran, so ``i`` is unbound; fall back to the
        # initial position.
        log('err', '正向判定完成,索引定位出错')
        self.index = self.init_corpus[0][0]
    # Backward judgement (反向判定): walk forwards past the initial
    # position with the same stop logic.
    cd = 0
    tag = None
    for i in range(1, len(self.unit_raw) - self.init_corpus[0][0]):
        init_unit = self.unit_raw[self.init_corpus[0][0] + i]
        cdm = CDM(init_unit)
        alpha = cdm.get_alpha()
        if cd <= self.cd_min and cdm.NC != 0:
            tag = True
        if cd > self.cd_max or cdm.NC == 0:
            tag = False
        if cd in range(self.cd_min + 1, self.cd_max) and cdm.NC != 0:
            if alpha > 0:
                tag = True
            else:
                tag = False
        if cdm.NC == 0:
            cd += 1
        else:
            cd = 0
        if tag == True:
            corpus.append(init_unit)
        elif tag == False:
            if alpha < 0 or cd > self.cd_max:
                break
            else:
                continue
    log('debug', '\n获取语料库成功:【{}】\n'.format(corpus))
    return ''.join(corpus)
def gen_txt_repr(self, hdrs, register=True):
    # type: (Union[H2Frame, List[HPackHeaders]], Optional[bool]) -> str
    """Return a textual representation of the provided headers.

    The output of this function is compatible with the input of
    parse_txt_hdrs.

    @param H2Frame|list of HPackHeaders hdrs: the headers to convert to textual representation  # noqa: E501
    @param bool register: whether incremental headers should be added to the dynamic table as we generate the text  # noqa: E501
        representation
    @return str: the textual representation of the provided headers
    @raise AssertionError
    """
    if isinstance(hdrs, H2Frame):
        hdrs = hdrs.payload.hdrs
    lines = []
    for hdr in hdrs:
        try:
            if isinstance(hdr, HPackIndexedHdr):
                lines.append('{}'.format(self[hdr.index]))
            elif isinstance(hdr, (HPackLitHdrFldWithIncrIndexing,
                                  HPackLitHdrFldWithoutIndexing)):
                if hdr.index != 0:
                    name = self[hdr.index].name()
                else:
                    name = hdr.hdr_name.getfieldval('data').origin()
                value = hdr.hdr_value.getfieldval('data').origin()
                # Pseudo-headers (":path", ...) are written "name value";
                # regular headers are written "name: value".
                if name.startswith(':'):
                    lines.append('{} {}'.format(name, value))
                else:
                    lines.append('{}: {}'.format(name, value))
                if register and isinstance(hdr, HPackLitHdrFldWithIncrIndexing):  # noqa: E501
                    self.register(hdr)
        except KeyError as e:
            # Raised when an index is out-of-bound.
            print(e)
            continue
    return '\n'.join(lines)
def time_segments_average(X, interval, time_column):
    """Compute the average of values over fixed-length time segments.

    Deprecated (emits a DeprecationWarning).  Returns a pair of arrays:
    per-segment means and the segment start timestamps.
    """
    warnings.warn(_TIME_SEGMENTS_AVERAGE_DEPRECATION_WARNING,
                  DeprecationWarning)
    if isinstance(X, np.ndarray):
        X = pd.DataFrame(X)
    X = X.sort_values(time_column).set_index(time_column)
    segment_start = X.index.values[0]
    last_ts = X.index.values[-1]
    averages = []
    timestamps = []
    while segment_start <= last_ts:
        segment_end = segment_start + interval
        # .loc slicing is inclusive, so stop one tick before segment_end.
        window = X.loc[segment_start:segment_end - 1]
        averages.append(window.mean(skipna=True).values)
        timestamps.append(segment_start)
        segment_start = segment_end
    return np.asarray(averages), np.asarray(timestamps)
def _load_file(self, f):
    """Get values from a config file and merge them into self.values."""
    try:
        with open(f, 'r') as config_fo:
            # seria normalizes whatever format the file is in to YAML.
            loaded = seria.load(config_fo)
            as_yaml = loaded.dump('yaml')
    except IOError:
        raise FiggypyError("could not open configuration file")
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input and deprecated in PyYAML >= 5.1 -- consider
    # yaml.safe_load if the config never needs custom tags.
    self.values.update(yaml.load(as_yaml))
def as_es2_command(command):
    """Modify a desktop command so it works on es2."""
    kind = command[0]
    if kind == 'FUNC':
        # Strip the "gl" prefix and lowercase the first letter of the
        # function name: glClear -> clear.
        es2_name = re.sub(r'^gl([A-Z])',
                          lambda m: m.group(1).lower(),
                          command[1])
        return (kind, es2_name) + command[2:]
    if kind == 'SHADERS':
        return command[:2] + convert_shaders('es2', command[2:])
    if kind == 'UNIFORM':
        # Convert the trailing numpy array payload into a plain list.
        return command[:-1] + (command[-1].tolist(),)
    return command
def reverse_media_url(target_type, url_string, *args, **kwargs):
    '''Given a target type and a resource URL, generates a valid URL to
    this via.'''
    # Encode positional and keyword arguments into the resource URL.
    args_str = '<%s>' % '><'.join(args)
    kwargs_str = '<%s>' % '><'.join('%s:%s' % pair for pair in kwargs.items())
    normalized_url = str(ResourceURL(
        ''.join([url_string, args_str, kwargs_str])))

    query_tuples = []
    security = singletons.settings.SECURITY
    if security and 'Sha1' in security:
        # Sign the normalized URL so the server can verify the request.
        digest = get_hmac_sha1_digest(singletons.settings.HMAC_SECRET,
                                      normalized_url, target_type)
        query_tuples.append(('digest', digest))
    # The URL itself is always the last querystring argument.
    query_tuples.append(('url', normalized_url))
    querystring = urlencode(query_tuples)

    # Prefer the external-facing host/port; fall back to the internal one.
    scheme = singletons.settings.EXTERNAL_SCHEME
    host = singletons.settings.EXTERNAL_HOST or singletons.settings.HOST
    port = singletons.settings.EXTERNAL_PORT or singletons.settings.PORT
    port_suffix = ':%s' % port if port != 80 else ''
    typestring_normalized = str(TypeString(target_type))
    return '%s://%s%s/media/%s/?%s' % (
        scheme, host, port_suffix, typestring_normalized, querystring)
def _resolve(self):
    """Resolve the type symbol from its name by doing a lookup."""
    self.__is_resolved = True
    if self.is_complex:
        # For nested types resolve the nested symbol, otherwise self;
        # the lookup itself always goes through this symbol's module.
        target = self.nested if self.nested else self
        target.__reference = self.module.lookup(target.name)
def karbasa(self, result):
    """Measure whether the class probabilities are close to each other.

    Computes the ratio of the distance between the two smallest sorted
    probabilities to the distance between the smallest and largest.

    NOTE(review): the original docstring said "distance between 1st and
    2nd class"; after the ascending sort these are the two *lowest*
    probabilities -- confirm which is intended.

    :param result: the dict returned by LM.calculate()
    """
    probs = result['all_probs']
    probs.sort()  # NB: sorts the caller's list in place
    spread = float(probs[-1] - probs[0])
    return float(probs[1] - probs[0]) / spread
def from_fortran_file(cls, fortran_file: str, tmpdir: str = "."):
    """Builds GrFN object from a Fortran program.

    The source is preprocessed, parsed to XML by the Open Fortran
    Parser, translated to JSON and then to Python source, from which
    the GrFN is constructed.
    """
    stem = Path(fortran_file).stem
    # Default the scratch directory to the source file's directory.
    if tmpdir == "." and "/" in fortran_file:
        tmpdir = Path(fortran_file).parent
    preprocessed_fortran_file = f"{tmpdir}/{stem}_preprocessed.f"
    lambdas_path = f"{tmpdir}/{stem}_lambdas.py"
    json_filename = stem + ".json"

    with open(fortran_file, "r") as f:
        input_lines = f.readlines()
    with open(preprocessed_fortran_file, "w") as f:
        f.write(preprocessor.process(input_lines))

    # Run the Open Fortran Parser to obtain the XML AST.
    xml_string = sp.run(
        [
            "java",
            "fortran.ofp.FrontEnd",
            "--class",
            "fortran.ofp.XMLPrinter",
            "--verbosity",
            "0",
            preprocessed_fortran_file,
        ],
        stdout=sp.PIPE,
    ).stdout
    trees = [ET.fromstring(xml_string)]
    comments = get_comments.get_comments(preprocessed_fortran_file)
    os.remove(preprocessed_fortran_file)

    xml_to_json_translator = translate.XMLToJSONTranslator()
    output_dict = xml_to_json_translator.analyze(trees, comments)
    py_src = pyTranslate.create_python_source_list(output_dict)[0][0]
    return cls.from_python_src(py_src, lambdas_path, json_filename, stem)
def to_xml(self):  # pylint: disable=R0912
    """Converts object into an XML string."""
    parts = []
    # One <asset .../> element per asset, with optional attributes only
    # when the asset carries a truthy value for them.
    for asset in self.assets:
        parts.append('<asset filename="%s" '
                     % os.path.basename(asset['filename']))
        parts.append(' refid="%(refid)s"' % asset)
        parts.append(' size="%(size)s"' % asset)
        parts.append(' hash-code="%s"' % asset['hash-code'])
        parts.append(' type="%(type)s"' % asset)
        for attr in ('encoding-rate', 'frame-width', 'frame-height',
                     'display-name', 'encode-to', 'encode-multiple',
                     'h264-preserve-as-rendition', 'h264-no-processing'):
            if asset.get(attr, None):
                parts.append(' %s="%s"' % (attr, asset[attr]))
        parts.append(' />\n')

    # <title> element with optional dates and typed asset references.
    parts.append('<title name="%(name)s" refid="%(referenceId)s" active="TRUE" ')
    if self.start_date:
        parts.append('start-date="%(start_date)s" ')
    if self.end_date:
        parts.append('end-date="%(end_date)s" ')
    for asset in self.assets:
        # Assets without an encoding rate are referenced by their type.
        if asset.get('encoding-rate', None) is None:
            kinds = enums.AssetTypeEnum
            asset_type = asset.get('type', None)
            if asset_type == kinds.VIDEO_FULL:
                parts.append('video-full-refid="%s" ' % asset.get('refid'))
            if asset_type == kinds.THUMBNAIL:
                parts.append('thumbnail-refid="%s" ' % asset.get('refid'))
            if asset_type == kinds.VIDEO_STILL:
                parts.append('video-still-refid="%s" ' % asset.get('refid'))
            if asset_type == kinds.FLV_BUMPER:
                parts.append('flash-prebumper-refid="%s" ' % asset.get('refid'))
    parts.append('>\n')
    if self.short_description:
        parts.append('<short-description><![CDATA[%(shortDescription)s]]>')
        parts.append('</short-description>\n')
    if self.long_description:
        parts.append('<long-description><![CDATA[%(longDescription)s]]>')
        parts.append('</long-description>\n')
    for tag in self.tags:
        parts.append('<tag><![CDATA[%s]]></tag>\n' % tag)
    # Assets WITH an encoding rate are referenced as renditions instead.
    for asset in self.assets:
        if asset.get('encoding-rate', None):
            parts.append('<rendition-refid>%s</rendition-refid>\n'
                         % asset['refid'])
    for meta in self.metadata:
        parts.append('<custom-%s-value name="%s">%s</custom-%s-value>'
                     % (meta['type'], meta['key'], meta['value'], meta['type']))
    parts.append('</title>')
    # Substitute the title-level fields (%(name)s etc.) in one pass.
    return ''.join(parts) % self._to_dict()
def _delegate(self, path):
    # type: (Text) -> Tuple[FS, Text]
    """Get the delegate FS for a given path.

    Arguments:
        path (str): A path.

    Returns:
        (FS, str): a tuple of ``(<fs>, <path>)`` for a mounted
            filesystem, or ``(<default_fs>, <path>)`` when no mount
            point matches the given ``path``.
    """
    normalized = forcedir(abspath(normpath(path)))
    for mount_path, mounted_fs in self.mounts:
        if normalized.startswith(mount_path):
            # Strip the mount prefix and any trailing slash.
            return mounted_fs, normalized[len(mount_path):].rstrip("/")
    return self.default_fs, path
async def retract(self, mount: top_types.Mount, margin: float):
    """Pull the specified mount up to its home position.

    Works regardless of critical point or home status.

    :param mount: the mount to retract
    :param margin: distance passed through to the backend fast-home move
    """
    # Map the mount to its carriage axis letter (e.g. 'Z' or 'A').
    smoothie_ax = Axis.by_mount(mount).name.upper()
    async with self._motion_lock:
        # NOTE(review): fast_home is invoked synchronously inside an async
        # context -- presumably it blocks only briefly; confirm.
        smoothie_pos = self._backend.fast_home(smoothie_ax, margin)
        # Refresh the cached deck-coordinate position from the new
        # motor-space (smoothie) coordinates.
        self._current_position = self._deck_from_smoothie(smoothie_pos)
def _add_transaction_to_canonical_chain(db: BaseDB, transaction_hash: Hash32, block_header: BlockHeader, index: int) -> None:
    """Record a canonical-chain lookup for a transaction.

    Adds a lookup from ``transaction_hash`` to the block number and index
    at which the transaction body is stored.

    :param transaction_hash: the hash of the transaction to add the lookup for
    :param block_header: header of the canonical-chain block containing the transaction
    :param index: the position of the transaction within the block

    NOTE(review): the original docstring also promised removing the
    transaction-hash-to-body lookup from a pending pool; no such removal
    happens here -- confirm intent.
    """
    # (block_number, index) uniquely locates the transaction body.
    transaction_key = TransactionKey(block_header.block_number, index)
    db.set(
        SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash),
        rlp.encode(transaction_key),
    )
def _CreateLineString ( self , parent , coordinate_list ) :
"""Create a KML LineString element .
The points of the string are given in coordinate _ list . Every element of
coordinate _ list should be one of a tuple ( longitude , latitude ) or a tuple
( longitude , latitude , altitude ) .
Args :
parent : The parent ElementTree . Element instance .
coordinate _ list : The list of coordinates .
Returns :
The LineString ElementTree . Element instance or None if coordinate _ list is
empty .""" | if not coordinate_list :
return None
linestring = ET . SubElement ( parent , 'LineString' )
tessellate = ET . SubElement ( linestring , 'tessellate' )
tessellate . text = '1'
if len ( coordinate_list [ 0 ] ) == 3 :
altitude_mode = ET . SubElement ( linestring , 'altitudeMode' )
altitude_mode . text = 'absolute'
coordinates = ET . SubElement ( linestring , 'coordinates' )
if len ( coordinate_list [ 0 ] ) == 3 :
coordinate_str_list = [ '%f,%f,%f' % t for t in coordinate_list ]
else :
coordinate_str_list = [ '%f,%f' % t for t in coordinate_list ]
coordinates . text = ' ' . join ( coordinate_str_list )
return linestring |
def send_to_contact(self, obj_id, contact_id):
    """Send email to a specific contact.

    :param obj_id: int
    :param contact_id: int
    :return: dict|str
    """
    target = '{url}/{id}/send/contact/{contact_id}'.format(
        url=self.endpoint_url, id=obj_id, contact_id=contact_id)
    response = self._client.session.post(target)
    return self.process_response(response)
def query(self, query, inplace=True):
    """Filter ``self.data`` down to the rows you want to keep.

    :param query: a pandas ``DataFrame.query`` expression string
    :param inplace: forwarded to ``DataFrame.query``; when True the
        underlying frame is mutated and (per pandas convention) ``None``
        is returned, otherwise the filtered copy is returned
    """
    # TODO: add to queue
    result = self.data.query(query, inplace=inplace)
    return result
def get_local_part(value):
    """local-part = dot-atom / quoted-string / obs-local-part

    Parse the local part (before the '@') of an address from *value*.
    Returns a ``(LocalPart, remaining_string)`` tuple. Obsolete or
    not-quite-valid input is recorded as defects on the token rather
    than raised, where possible.
    """
    local_part = LocalPart()
    leader = None
    # Optional leading comment/folding whitespace (CFWS).
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected local-part but found '{}'".format(value))
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        try:
            # Fall back to a single word (atom or quoted-string).
            token, value = get_word(value)
        except errors.HeaderParseError:
            if value[0] != '\\' and value[0] in PHRASE_ENDS:
                raise
            token = TokenList()
    if leader is not None:
        token[:0] = [leader]
    local_part.append(token)
    # If more follows that a dot-atom/word could not consume, re-parse
    # the whole local part as an obsolete (RFC 2822 "obs-") form.
    if value and (value[0] == '\\' or value[0] not in PHRASE_ENDS):
        obs_local_part, value = get_obs_local_part(str(local_part) + value)
        if obs_local_part.token_type == 'invalid-obs-local-part':
            local_part.defects.append(errors.InvalidHeaderDefect(
                "local-part is not dot-atom, quoted-string, or obs-local-part"))
        else:
            local_part.defects.append(errors.ObsoleteHeaderDefect(
                "local-part is not a dot-atom (contains CFWS)"))
        local_part[0] = obs_local_part
    # Flag (but accept) non-ASCII local parts.
    try:
        local_part.value.encode('ascii')
    except UnicodeEncodeError:
        local_part.defects.append(errors.NonASCIILocalPartDefect(
                "local-part contains non-ASCII characters)"))
    return local_part, value
async def answer_shipping_query(self,
                                shipping_query_id: base.String,
                                ok: base.Boolean,
                                shipping_options: typing.Union[typing.List[types.ShippingOption], None] = None,
                                error_message: typing.Union[base.String, None] = None) -> base.Boolean:
    """Reply to a shipping query.

    If you sent an invoice requesting a shipping address and the parameter
    is_flexible was specified, the Bot API will send an Update with a
    shipping_query field to the bot.

    Source: https://core.telegram.org/bots/api#answershippingquery

    :param shipping_query_id: Unique identifier for the query to be answered
    :param ok: Specify True if delivery to the specified address is possible
        and False if there are any problems
    :param shipping_options: Required if ok is True. A JSON-serialized array
        of available shipping options
    :param error_message: Required if ok is False. Human-readable error
        message explaining why the order cannot be completed; Telegram will
        display it to the user.
    :return: On success, True is returned
    """
    if shipping_options:
        # Serialize each option; because generate_payload(**locals()) is
        # used below, this rebinding is what lands in the request payload.
        shipping_options = prepare_arg([shipping_option.to_python() if hasattr(shipping_option, 'to_python') else shipping_option for shipping_option in shipping_options])
    # NOTE: generate_payload captures *all* local names -- do not
    # introduce new locals into this function.
    payload = generate_payload(**locals())
    result = await self.request(api.Methods.ANSWER_SHIPPING_QUERY, payload)
    return result
def enforce_duration(self, duration_thresh):
    """Silence flat pitch-contour sections that are too short.

    Takes the quantized pitch contour and filters out (sets to -10000)
    those constant-pitch sections whose duration is below
    ``duration_thresh`` (given in milliseconds). All computations assume
    data in cent scale.
    """
    i = 1
    while i < len(self.pitch) - 1:
        # -10000 marks an already-silenced sample; skip it.
        if self.pitch[i] == -10000:
            i += 1
            continue
        # A flat section starts where the value changes at i and stays
        # constant from i to i+1.
        if self.pitch[i] - self.pitch[i - 1] != 0 and self.pitch[i + 1] - self.pitch[i] == 0:
            start = i
            # Advance to the end of the flat section.
            # NOTE(review): if i reaches len(self.pitch) - 1 this condition
            # still evaluates self.pitch[i + 1] and would raise IndexError;
            # confirm inputs always end with a value change.
            while i < len(self.pitch) and self.pitch[i + 1] - self.pitch[i] == 0:
                i += 1
            # Silence the whole section when shorter than the threshold
            # (timestamps are in seconds, threshold in milliseconds).
            if (self.timestamps[i] - self.timestamps[start]) * 1000 < duration_thresh:
                self.pitch[start:i + 1] = np.zeros(i + 1 - start) - 10000
        else:
            # Isolated one-sample change: silence the single sample.
            self.pitch[i] = -10000
        i += 1
def show_non_search_results(log_rec, code_view=True, json_view=False, show_message_details=False):
    """Show non-search results for search jobs like
    ``index="antinex" | stats count``.

    :param log_rec: log record from splunk
    :param code_view: show as a normal ``tail -f <log file>`` view
    :param json_view: pretty print each log's dictionary
    :param show_message_details: include message details
    """
    # NOTE(review): code_view, json_view and show_message_details are
    # accepted but never consulted below -- confirm whether they were
    # meant to select a rendering mode.
    log_dict = None
    try:
        log_dict = json.loads(log_rec)
    except Exception as e:
        # Record is not JSON -- fall back to printing it raw.
        log_dict = None
    # end of try/ex
    if not log_dict:
        log.info(('{}').format(ppj(log_rec)))
    else:
        log.info(('{}').format(ppj(log_dict)))
def check_critical_load(self):
    """Check the 1-minute load average and log when thresholds are crossed.

    Emits at most one log line per threshold level every 30 seconds and
    resets the throttling state once load drops back below 0.8.
    """
    load_1m = self.load_avg.intervals['1m'].value
    if load_1m > 1:
        # Throttle: skip if we already logged this level within 30s.
        if self.last_load_level == 1 and time.time() - self.last_load_log < 30:
            return
        self.last_load_log = time.time()
        self.last_load_level = 1
        logger.error("Listener load limit exceeded, the system can't handle this!",
                     extra=self._make_stats())
    elif load_1m > 0.8:
        if self.last_load_level == 0.8 and time.time() - self.last_load_log < 30:
            return
        self.last_load_log = time.time()
        self.last_load_level = 0.8
        logger.warning("Listener load approaching critical!",
                       extra=self._make_stats())
    else:
        # Load is healthy again: reset so the next crossing logs at once.
        self.last_load_log = -math.inf
        self.last_load_level = 0
def min_width(self) -> int:
    """Minimum width necessary to render the block's contents."""
    widest_line = max(len(line) for line in self.content.split('\n'))
    # Only horizontal lines can cross 0-width blocks, so a block with a
    # top or bottom edge still needs at least one column.
    edge_width = int(any([self.top, self.bottom]))
    return max(widest_line, edge_width)
def derivative(self, x):
    """Return the operator derivative.

    The derivative of the operator composition follows the chain rule:

        ``OperatorComp(left, right).derivative(y) ==
        OperatorComp(left.derivative(right(y)), right.derivative(y))``

    Parameters
    ----------
    x : `domain` `element-like`
        Evaluation point of the derivative. Needs to be usable as
        input for the ``right`` operator.
    """
    if self.is_linear:
        # A linear operator is its own derivative.
        return self
    else:
        if self.left.is_linear:
            # Skip the derivative call for a linear left factor.
            left_deriv = self.left
        else:
            left_deriv = self.left.derivative(self.right(x))
        right_deriv = self.right.derivative(x)
        # Reuse this composition's temporary (__tmp) to avoid reallocating.
        return OperatorComp(left_deriv, right_deriv, self.__tmp)
def get_attribute(self, app=None, key=None):
    """Return an application attribute.

    :param app: application id
    :param key: attribute key, or None to retrieve all values for the
        given application
    :returns: the attribute value if key was specified (None when no
        matching element came back); otherwise a list of tuples
        ``(key, value)`` -- or ``(app, key, value)`` when no app was
        given -- for each attribute
    :raises: HTTPResponseError in case an HTTP error status was returned
    """
    path = 'getattribute'
    if app is not None:
        path += '/' + parse.quote(app, '')
    if key is not None:
        path += '/' + parse.quote(self._encode_string(key), '')
    res = self._make_ocs_request('GET', self.OCS_SERVICE_PRIVATEDATA, path)
    if res.status_code == 200:
        tree = ET.fromstring(res.content)
        self._check_ocs_status(tree)
        values = []
        for element in tree.find('data').iter('element'):
            app_text = element.find('app').text
            key_text = element.find('key').text
            # Missing <value> text is normalized to the empty string.
            value_text = element.find('value').text or ''
            if key is None:
                if app is None:
                    values.append((app_text, key_text, value_text))
                else:
                    values.append((key_text, value_text))
            else:
                # A specific key was requested: the first element's value
                # is the answer.
                return value_text
        if len(values) == 0 and key is not None:
            # Key requested but the response held no elements.
            return None
        return values
    raise HTTPResponseError(res)
def update(self, data):
    """Update the hash state with new data, buffering partial blocks.

    May be called multiple times; when a call delivers less than a full
    block, the leftover is cached and consumed on the next call.

    :param data: data to be hashed (bytestring)
    """
    self.state = 2
    # De-reference instance constants for readability.
    BLKBYTES = self.BLKBYTES
    BLKBITS = self.BLKBITS
    datalen = len(data)
    if not datalen:
        return
    # Accept text under both Python 2 and 3 by encoding it to bytes.
    if type(data) == type(u''):
        data = data.encode('UTF-8')
    left = len(self.cache)
    fill = BLKBYTES - left
    # If cached data plus new data fill a whole block, compress it first.
    if left and datalen >= fill:
        self.cache = self.cache + data[:fill]
        self.t += BLKBITS  # update the processed-bits counter
        self._compress(self.cache)
        self.cache = b''
        data = data[fill:]
        datalen -= fill
    # Compress full blocks of new data until less than one block remains.
    while datalen >= BLKBYTES:
        self.t += BLKBITS  # update the processed-bits counter
        self._compress(data[:BLKBYTES])
        data = data[BLKBYTES:]
        datalen -= BLKBYTES
    # Cache all leftover bytes until the next call to update().
    if datalen > 0:
        self.cache = self.cache + data[:datalen]
def netconf_state_sessions_session_session_id(self, **kwargs):
    """Build the NETCONF monitoring config XML for a session-id query.

    Expects ``session_id`` in kwargs; an optional ``callback`` kwarg
    overrides ``self._callback`` and receives the assembled element tree.
    """
    config = ET.Element("config")
    netconf_state = ET.SubElement(
        config, "netconf-state",
        xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring")
    session = ET.SubElement(ET.SubElement(netconf_state, "sessions"), "session")
    ET.SubElement(session, "session-id").text = kwargs.pop('session_id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def static_transition(timestamp, contract_dates, transition, holidays=None, validate_inputs=True):
    """An implementation of the *get_weights* parameter in roller().

    Return weights to tradeable instruments for a given date based on a
    transition DataFrame which indicates how to roll through the roll
    period.

    Parameters
    ----------
    timestamp : pandas.Timestamp
        The timestamp to return instrument weights for.
    contract_dates : pandas.Series
        Series with index of tradeable contract names and
        pandas.Timestamps representing the last date of the roll as
        values, sorted by values. Index must be unique and values must
        be strictly monotonic.
    transition : pandas.DataFrame
        DataFrame with an index of integers representing business day
        offsets from the last roll date and columns which are a
        MultiIndex where the top level is generic instruments and the
        second level is ['front', 'back'], referring to the front and
        back month contract of the roll. Values give the fraction of the
        roll on each day of the roll period; the first row should be
        fully allocated to the front contract and the last row fully to
        the back contract.
    holidays : array_like of datetime64[D]
        Holidays to exclude when calculating business day offsets from
        the last roll date. See numpy.busday_count.
    validate_inputs : bool
        Whether to validate ordering of contract_dates and transition.
        **Caution**: when False and inputs are not defined properly the
        algorithm may return incorrect data.

    Returns
    -------
    list of tuple
        Tuples of (generic name, tradeable contract name, weight,
        timestamp).
    """
    if validate_inputs:
        # Sorting is required for MultiIndex slicing inside the check.
        _check_static(transition.sort_index(axis=1))
        # The algorithm below returns invalid results if contract_dates
        # is malformed, so fail explicitly up front.
        _check_contract_dates(contract_dates)
    if not holidays:
        holidays = []
    # Contracts whose roll date is on/after the timestamp are tradeable.
    after_contract_dates = contract_dates.loc[contract_dates >= timestamp]
    contracts = after_contract_dates.index
    front_expiry_dt = after_contract_dates.iloc[0]
    # Negative while before the front contract's roll date.
    days_to_expiry = np.busday_count(front_expiry_dt.date(), timestamp.date(), holidays=holidays)
    # Map each generic (e.g. CL1, CL2) to its offset into `contracts`.
    name2num = dict(zip(transition.columns.levels[0], range(len(transition.columns.levels[0]))))
    if days_to_expiry in transition.index:
        # Fix: Series.iteritems() was removed in pandas 2.0; items() is
        # the long-supported equivalent.
        weights_iter = transition.loc[days_to_expiry].items()
    elif days_to_expiry < transition.index.min():
        # Roll hasn't started yet: use the first transition row.
        # (zip over .values is faster than iloc[0].items())
        vals = transition.values[0]
        weights_iter = zip(transition.columns.tolist(), vals)
    else:
        # Roll is finished: use the last transition row.
        vals = transition.values[-1]
        weights_iter = zip(transition.columns.tolist(), vals)
    cwts = []
    for idx_tuple, weighting in weights_iter:
        gen_name, position = idx_tuple
        if weighting != 0:
            # 'front' maps to the generic's own slot, 'back' to the next
            # contract along the curve.
            if position == "front":
                cntrct_idx = name2num[gen_name]
            elif position == "back":
                cntrct_idx = name2num[gen_name] + 1
            try:
                cntrct_name = contracts[cntrct_idx]
            except IndexError as e:
                raise type(e)(
                    ("index {0} is out of bounds in\n{1}\nas of {2} "
                     "resulting from {3} mapping").format(
                        cntrct_idx, after_contract_dates, timestamp, idx_tuple)
                ).with_traceback(sys.exc_info()[2])
            cwts.append((gen_name, cntrct_name, weighting, timestamp))
    return cwts
async def decrypt(self, ciphertext: bytes, sender: str = None) -> (bytes, str):
    """Decrypt ciphertext and optionally authenticate its sender.

    Raise BadKey if the authentication operation reveals a sender key
    distinct from the current verification key of the owner of the input
    DID. Raise WalletState if the wallet is closed.

    :param ciphertext: ciphertext, as bytes
    :param sender: DID or verification key of sender; None for
        anonymously encrypted ciphertext
    :return: decrypted bytes and sender verification key (None for
        anonymous decryption)
    """
    LOGGER.debug('BaseAnchor.decrypt >>> ciphertext: %s, sender: %s', ciphertext, sender)
    if not self.wallet.handle:
        LOGGER.debug('BaseAnchor.decrypt <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))
    from_verkey = None
    if sender:
        # Resolve a DID or verkey reference to its current verkey.
        from_verkey = await self._verkey_for(sender)
    rv = await self.wallet.decrypt(ciphertext, True if from_verkey else None, to_verkey=None, from_verkey=from_verkey)
    LOGGER.debug('BaseAnchor.decrypt <<< %s', rv)
    return rv
def _force_close ( self ) :
"""Close connection without QUIT message""" | if self . _sock :
try :
self . _sock . close ( )
except : # noqa
pass
self . _sock = None
self . _rfile = None |
def set_consistent(self, consistent_config):
    """Indicates that the stream is the start of a consistent region.

    Args:
        consistent_config(consistent.ConsistentRegionConfig): the
            configuration of the consistent region.

    Returns:
        Stream: Returns this stream.

    .. versionadded:: 1.11
    """
    # A consistent region requires the job control plane operator;
    # add it (idempotently) if needed.
    self.topology._add_job_control_plane()
    self.oport.operator.consistent(consistent_config)
    return self._make_placeable()
def plotter(molecules, ensemble_lookup, options):
    """Plot paired ROC curves for each ensemble in *ensemble_lookup*.

    For every ensemble a figure is written to the working directory with
    two subplots: per-query ROC curves on the left and the combined
    ensemble ROC curve on the right.

    :param molecules: scored molecules passed through to classification
    :param ensemble_lookup: mapping of ensemble name -> list of queries
    :param options: options object providing ``outname`` for file naming
    :return: None (figures are saved as PDFs as a side effect)
    """
    try:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
    except ImportError:
        print("\n Plotting requires matplotlib to be installed\n")
        sys.exit(1)

    def _roc_points(query_list):
        # Compute (false-positive, true-positive) fraction lists for the
        # given queries via the classification helpers.
        score_structure = classification.make_score_structure(molecules, query_list)
        auc_structure = classification.make_auc_structure(score_structure)
        fpf = [mol[4] for mol in auc_structure]
        tpf = [mol[5] for mol in auc_structure]
        return fpf, tpf

    for ensemble in ensemble_lookup:
        fig = plt.figure()
        # Left subplot: one ROC curve per individual query.
        ax1 = fig.add_subplot(121)
        for query in sorted(ensemble_lookup[ensemble]):
            fpf, tpf = _roc_points([query])
            ax1.plot(fpf, tpf, lw=3, label=query)
        ax1.set_xlabel('FPF')
        ax1.set_ylabel('TPF')
        ax1.set_title('query performance')
        # Reverse handle order so the legend matches the plot stacking.
        # (The original reversed the handles, then immediately overwrote
        # the legend with a second un-reversed call -- consolidated here.)
        handles, labels = ax1.get_legend_handles_labels()
        ax1.legend(handles[::-1], labels[::-1], loc='best')

        # Right subplot: the combined ensemble ROC curve.
        fpf, tpf = _roc_points(ensemble_lookup[ensemble])
        ax2 = fig.add_subplot(122)
        ax2.set_xlabel('FPF')
        ax2.set_ylabel('TPF')
        ax2.set_title('ensemble performance')
        ax2.plot(fpf, tpf, lw=3, label='ensemble')
        handles, labels = ax2.get_legend_handles_labels()
        ax2.legend(handles[::-1], labels[::-1], loc='best')

        figurename = options.outname + '_' + ensemble.replace('.csv', '') + '.pdf'
        filename = os.path.join(os.getcwd(), figurename)
        # Fix: the original passed the misspelled kwarg ``bbobx='tight'``;
        # the intended matplotlib argument is ``bbox_inches``.
        plt.savefig(filename, bbox_inches='tight', format='pdf')
def get_id(self):
    """Return the id of the resource, or None when it has no id."""
    id_attr = self._id_attr
    if id_attr is None:
        return None
    # Missing attribute is treated the same as no id at all.
    return getattr(self, id_attr, None)
def pretty_bytes(num):
    """Pretty-print the given number of bytes with a binary unit suffix."""
    for suffix in ('', 'KB', 'MB', 'GB'):
        if num < 1024.0:
            # Plain byte counts render without decimals or a suffix.
            return "%d" % num if suffix == '' else "%3.1f%s" % (num, suffix)
        num /= 1024.0
    return "%3.1f%s" % (num, 'TB')
def from_simplex(x):
    r"""Interpret the last index of *x* as unit simplices and return a
    real array of the same shape in logit space.

    Inverse to :func:`to_simplex`; see that function for more details.

    :param np.ndarray x: Array of unit simplices along the last index.
    :rtype: ``np.ndarray``
    """
    n = x.shape[-1]
    # z are the stick-breaking fractions in [0, 1]; the last one is
    # always 1, so it never needs transforming.
    z = np.empty(shape=x.shape)
    z[..., 0] = x[..., 0]
    z[..., 1:-1] = x[..., 1:-1] / (1 - x[..., :-2].cumsum(axis=-1))
    # Logit-transform the breaking fractions, centering so the uniform
    # simplex maps to the origin.
    # Fix: ``np.float`` was removed in NumPy 1.24; the builtin ``float``
    # is the documented replacement.
    z[..., :-1] = logit(z[..., :-1]) - logit(1 / (n - np.arange(n - 1, dtype=float)))
    # Set the last entry to 0 manually to avoid subtracting inf - inf.
    z[..., -1] = 0
    return z
def folderitems(self):
    """Return the analysis rows to render, ordered by worksheet layout.

    Each dictionary in the returned list represents one analysis row.
    The list is sorted in accordance with the layout positions set for
    the analyses when they were added to this worksheet.

    :returns: list of dicts with the items to be rendered in the list
    """
    items = BaseView.folderitems(self)
    # Fill empty positions from the layout with fake rows. The worksheet
    # can be generated from a WorksheetTemplate, so some slots may be
    # empty; we still render a row, at least to display the slot number
    # (Pos).
    self.fill_empty_slots(items)
    # Sort the items in accordance with the layout.
    items = sorted(items, key=itemgetter("pos_sortkey"))
    # Fill the slot header cells (first cell of each row). Each slot
    # contains the analyses that belong to the same parent
    # (AnalysisRequest, ReferenceSample), so the information about the
    # parent is displayed in the first cell of each slot.
    self.fill_slots_headers(items)
    return items
def flatten(*sequence):
    """Flatten nested lists/tuples into a single flat list."""
    flat = []
    for item in sequence:
        if isinstance(item, (list, tuple)):
            # Recurse through nested containers via the class helper.
            flat += Select.flatten(*item)
        else:
            flat.append(item)
    return flat
def update(self):
    """Advance the currently running animation.

    Call this every frame while an animation should run; it decides
    whether it is time to move on to the next image.

    Returns:
        True exactly when a non-looping animation has just finished its
        final iteration, otherwise False.
    """
    returnValue = False  # typical return value
    if self.state != PygAnimation.PLAYING:
        return returnValue
    # Figure out the index of the image to show and the matching elapsed
    # time threshold for the current image.
    self.elapsed = (time.time() - self.playingStartTime)
    if self.elapsed > self.elapsedStopTime:  # anim finished
        if self.loop:
            # Restart the animation from the first frame.
            self.playingStartTime = time.time()
            self.nextElapsedThreshold = self.endTimesList[0]
        else:  # not looping
            self.nIterationsLeft = self.nIterationsLeft - 1
            if self.nIterationsLeft == 0:  # done
                self.state = PygAnimation.STOPPED
                if self.callBack != None:
                    # Notify the registered callback that we finished.
                    self.callBack(self.nickname)
                returnValue = True  # animation has ended
            else:
                # Another iteration: start over from the beginning.
                self.playingStartTime = time.time()
                self.nextElapsedThreshold = self.endTimesList[0]
                self.index = 0
    elif self.elapsed > self.nextElapsedThreshold:
        # Time to move on to the next picture.
        self.index = self.index + 1
        self.nextElapsedThreshold = self.endTimesList[self.index]
    return returnValue
def add_permalink_methods(content_inst):
    """Bind every PERMALINK_METHODS function to *content_inst* as a method."""
    cls = content_inst.__class__
    for method in PERMALINK_METHODS:
        # Descriptor protocol: produce a bound method for this instance.
        bound = method.__get__(content_inst, cls)
        setattr(content_inst, method.__name__, bound)
def parse_messages_by_stream(messages_by_stream):
    """Parse XREAD-style replies into (stream, message_id, fields) tuples.

    XREAD returns ``[stream_name, [message_id, [k1, v1, k2, v2, ...]], ...]``;
    each message's flat key/value list is converted (via parse_messages)
    into an OrderedDict of field pairs.
    """
    if messages_by_stream is None:
        return []
    return [
        (stream, message_id, fields)
        for stream, messages in messages_by_stream
        for message_id, fields in parse_messages(messages)
    ]
def swo_supported_speeds(self, cpu_speed, num_speeds=3):
    """Retrieve the SWO speeds supported by both target and J-Link.

    The supported speeds are returned in order from highest to lowest.

    Args:
      self (JLink): the ``JLink`` instance
      cpu_speed (int): the target's CPU speed in Hz
      num_speeds (int): the number of compatible speeds to return

    Returns:
      A list of compatible SWO speeds in Hz, highest first.

    Raises:
      JLinkException: when the DLL reports a negative status code.
    """
    speeds = (ctypes.c_uint32 * num_speeds)()
    status = self._dll.JLINKARM_SWO_GetCompatibleSpeeds(cpu_speed, 0, speeds, num_speeds)
    if status < 0:
        raise errors.JLinkException(status)
    # The DLL fills at most `status` entries; truncate to what is valid.
    return list(speeds)[:status]
def _validate_auth_scheme(self, req):
    """Ensure the request carries auth using the Bearer scheme.

    Remember NOT to include error-related info in the WWW-Authenticate
    header for these conditions.

    :raise: AuthRequired
    """
    if not req.auth:
        detail = ('You must first login to access the requested '
                  'resource(s). Please retry your request using '
                  'OAuth 2.0 Bearer Token Authentication as '
                  'documented in RFC 6750. If you do not have an '
                  'access_token then request one at the token '
                  'endpdoint of: %s' % self.token_endpoint)
        raise AuthRequired(detail=detail,
                           headers=self._error_headers,
                           links='tools.ietf.org/html/rfc6750#section-2.1')
    if req.auth_scheme != 'bearer':
        detail = ('Your Authorization header is using an unsupported '
                  'authentication scheme. Please modify your scheme '
                  'to be a string of: "Bearer".')
        raise AuthRequired(detail=detail,
                           headers=self._error_headers,
                           links='tools.ietf.org/html/rfc6750#section-2.1')
def read(self, n=4096):
    """Return `n` bytes of data from the Stream, or None at end of stream.

    Recoverable environment errors (per Stream.ERRNO_RECOVERABLE) cause
    a silent retry; anything else propagates.
    """
    while True:
        try:
            source = self.fd
            if hasattr(source, 'recv'):
                return source.recv(n)
            return os.read(source.fileno(), n)
        except EnvironmentError as e:
            if e.errno not in Stream.ERRNO_RECOVERABLE:
                raise e
def _get_roots(self):
    """Return URL roots recognized for this S3 storage.

    Returns:
        tuple of str or re.Pattern: the ``s3://`` scheme prefix plus
        compiled patterns covering every AWS S3 URL style.
    """
    # Fall back to a wildcard region pattern when the session has none.
    region = self._get_session().region_name or r'[\w-]+'
    return (
        # S3 scheme:
        #   s3://<bucket>/<key>
        's3://',
        # Virtual-hosted-style URLs:
        #   http(s)://<bucket>.s3.amazonaws.com/<key>
        #   http(s)://<bucket>.s3-<region>.amazonaws.com/<key>
        _re.compile(r'https?://[\w.-]+\.s3\.amazonaws\.com'),
        _re.compile(r'https?://[\w.-]+\.s3-%s\.amazonaws\.com' % region),
        # Path-hosted-style URLs:
        #   http(s)://s3.amazonaws.com/<bucket>/<key>
        #   http(s)://s3-<region>.amazonaws.com/<bucket>/<key>
        _re.compile(r'https?://s3\.amazonaws\.com'),
        _re.compile(r'https?://s3-%s\.amazonaws\.com' % region),
        # Transfer acceleration URLs:
        #   http(s)://<bucket>.s3-accelerate.amazonaws.com
        #   http(s)://<bucket>.s3-accelerate.dualstack.amazonaws.com
        _re.compile(r'https?://[\w.-]+\.s3-accelerate\.amazonaws\.com'),
        _re.compile(r'https?://[\w.-]+\.s3-accelerate\.dualstack'
                    r'\.amazonaws\.com'))
def put(self, path=None, url_kwargs=None, **kwargs):
    """Send a PUT request.

    :param path: The HTTP path (either absolute or relative).
    :param url_kwargs: Parameters to override in the generated URL
        (see ``hyperlink.URL``).
    :param **kwargs: Optional arguments that ``request`` takes.
    :return: response object
    """
    target = self._url(path, url_kwargs)
    return self._session.put(target, **kwargs)
def return_env(self, exists=True):
    """Return an environment dict of MSVC build tool paths.

    Parameters
    ----------
    exists : bool
        If True, only return existing paths.
    """
    env = dict(
        # Header search paths.
        include=self._build_paths('include',
                                  [self.VCIncludes,
                                   self.OSIncludes,
                                   self.UCRTIncludes,
                                   self.NetFxSDKIncludes],
                                  exists),
        # Static/import library paths.
        lib=self._build_paths('lib',
                              [self.VCLibraries,
                               self.OSLibraries,
                               self.FxTools,
                               self.UCRTLibraries,
                               self.NetFxSDKLibraries],
                              exists),
        # Reference-assembly / LIBPATH entries.
        libpath=self._build_paths('libpath',
                                  [self.VCLibraries,
                                   self.FxTools,
                                   self.VCStoreRefs,
                                   self.OSLibpath],
                                  exists),
        # Executable search paths for the toolchain.
        path=self._build_paths('path',
                               [self.VCTools,
                                self.VSTools,
                                self.VsTDb,
                                self.SdkTools,
                                self.SdkSetup,
                                self.FxTools,
                                self.MSBuild,
                                self.HTMLHelpWorkshop,
                                self.FSharp],
                               exists),
    )
    # VS 2015+ (vc_ver >= 14) ships a redistributable vcruntime DLL to
    # bundle with builds, when present on disk.
    if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist):
        env['py_vcruntime_redist'] = self.VCRuntimeRedist
    return env
def _get_csrf_disabled_param():
    """Return the kwargs that disable CSRF for the installed WTF-Forms.

    From Flask-WTF 0.14.0, the ``csrf_enabled`` param has been deprecated
    in favor of ``meta={'csrf': True/False}``.
    """
    import flask_wtf
    from pkg_resources import parse_version
    uses_meta = parse_version(flask_wtf.__version__) >= parse_version("0.14.0")
    if uses_meta:
        return {'meta': {'csrf': False}}
    return {'csrf_enabled': False}
def at(self, step):
    """Return a TimeMachine for the same object at a different time.

    :param step: id field of an Action; the returned TimeMachine
        reflects state immediately after that Action.
    """
    snapshot = copy.deepcopy(self.info)
    return TimeMachine(self.uid, step=step, info=snapshot)
def detect_function_shadowing(contracts, direct_shadowing=True, indirect_shadowing=True):
    """Detect all overshadowing and overshadowed functions in ``contracts``.

    :param contracts: The contracts to detect shadowing within.
    :param direct_shadowing: Include results from direct inheritance /
        inheritance ancestry.
    :param indirect_shadowing: Include results from indirect inheritance
        collisions caused by multiple inheritance / C3 linearization.
    :return: Set of tuples (contract_scope, overshadowing_contract,
        overshadowing_function, overshadowed_contract,
        overshadowed_function). contract_scope is where the shadowing is
        most immediately detected; each (contract, function) pair names
        the first contract where the function is seen and the actual
        definition (possibly inherited).
    """
    results = set()
    for contract in contracts:
        # Shadowing through the immediate inheritance chain.
        if direct_shadowing:
            for overshadowing_function, base, overshadowed in \
                    detect_direct_function_shadowing(contract):
                results.add((contract, contract, overshadowing_function,
                             base, overshadowed))
        # Shadowing introduced by C3 linearization (multiple inheritance).
        if indirect_shadowing:
            for colliding in detect_c3_function_shadowing(contract):
                for i, earlier in enumerate(colliding[:-1]):
                    for later in colliding[i + 1:]:
                        # The same definition can recur along the chain,
                        # overshadowing items between, so filter out
                        # pairs resolving to the same contract.
                        if later[1].contract != earlier[1].contract:
                            results.add((contract, later[0], later[1],
                                         earlier[0], earlier[1]))
    return results
def exists(self):
    """Convenience check: True if at least one record matches the query."""
    count = self.rpc_model.search_count(self.domain, context=self.context)
    return count > 0
def just_log(*texts, sep=""):
    """Log a text without prepending the current time."""
    if config.silent:
        return
    merged = _color_sep + "default" + _color_sep2 + sep.join(texts)
    for chunk in merged.split(_color_sep):
        pieces = chunk.split(_color_sep2, 1)
        if len(pieces) != 2 or not pieces[1]:
            continue
        color, body = pieces
        if config.color:
            colors.foreground(color)
            print(body, end='', flush=colors.is_win32)
        else:
            print(body, end='')
    if config.color:
        colors.foreground("default")
    print()
def rnd_date_array(self, size, start=date(1970, 1, 1), end=None):
    """Generate an array or matrix of random dates.

    :param size: output size/shape, forwarded to ``self.randn``.
    :param start: earliest date (inclusive); a string is parsed with
        ``self.str2date``.
    :param end: latest date (inclusive); defaults to today's date,
        evaluated per call. (The previous default ``date.today()`` was
        frozen once at import time — a classic default-argument bug.)
    :raises ValueError: if ``start`` is later than ``end``.
    """
    if end is None:
        # Evaluate "today" at call time, not at function definition.
        end = date.today()
    if isinstance(start, string_types):
        start = self.str2date(start)
    if isinstance(end, string_types):
        end = self.str2date(end)
    if start > end:
        raise ValueError("start time has to be earlier than end time")
    return self.randn(size, self._rnd_date, start, end)
def disapproveproposal(ctx, proposal, account):
    """Disapprove a proposal."""
    tx = ctx.bitshares.disapproveproposal(proposal, account=account)
    print_tx(tx)
def from_short_lines_text(self, text: str):
    """Parse a Ljóðaháttr stanza (e.g. Hávamál 77) into its lines.

    The six non-empty input lines are wrapped as ShortLine/LongLine
    instances and then grouped into the stanza's four long-line units
    (pair, single, pair, single).

    :param text: stanza text, one short line per ``\\n``.
    """
    Metre.from_short_lines_text(self, text)
    lines = [line for line in text.split("\n") if line]
    kinds = (ShortLine, ShortLine, LongLine, ShortLine, ShortLine, LongLine)
    self.short_lines = [kind(line) for kind, line in zip(kinds, lines)]
    self.long_lines = [
        self.short_lines[0:2],
        [self.short_lines[2]],
        self.short_lines[3:5],
        [self.short_lines[5]],
    ]
def _write_wrapped(self, line, sep=" ", indent="", width=78):
    """Word-wrap a line of RiveScript code for being written to a file.

    Continuation lines are emitted as ``^`` commands; when the separator
    is a space, a trailing ``\\s`` keeps the space across the break.

    :param str line: The original line of text to word-wrap.
    :param str sep: The word separator.
    :param str indent: The indentation to use (as a set of spaces).
    :param int width: The character width to constrain each line to.
    :return str: The reformatted line(s).
    """
    words = line.split(sep)
    lines = []
    buf = []
    while words:
        buf.append(words.pop(0))
        if len(sep.join(buf)) > width and len(buf) > 1:
            # Too long: push the last word back for the next line.
            # The len(buf) > 1 guard leaves a single over-width word on
            # its own line; without it the original looped forever when
            # one word exceeded `width`.
            words.insert(0, buf.pop())
            lines.append(sep.join(buf))
            buf = []
    # Straggler?
    if buf:
        lines.append(sep.join(buf))
    if not lines:
        # Empty input: nothing to wrap (previously raised IndexError).
        return ""
    result = lines.pop(0)
    if lines:
        # "\s" is RiveScript's explicit-space escape, needed when the
        # separator itself is a space.
        eol = "\\s" if sep == " " else ""
        for item in lines:
            result += eol + "\n" + indent + "^ " + item
    return result
def cli(env):
    """Get Event Log Types"""
    mgr = SoftLayer.EventLogManager(env.client)
    table = formatting.Table(COLUMNS)
    for log_type in mgr.get_event_log_types():
        table.add_row([log_type])
    env.fout(table)
def count_table(*keys):
    """Count the number of times each key combination occurs.

    Equivalent to R's pivot table or pandas' crosstab; the dense
    counterpart of the count function.

    Arguments
    ---------
    keys : tuple of indexable objects, each having the same number of items

    Returns
    -------
    unique : tuple of ndarray, [groups, ...]
        Unique keys for each input item; they form the axes labels of
        the table.
    table : ndarray, [keys[0].groups, ... keys[n].groups], int
        The number of times each key-combination occurs in the input set.
    """
    indices = [as_index(k, axis=0) for k in keys]
    uniques = [i.unique for i in indices]
    inverses = [i.inverse for i in indices]
    shape = [i.groups for i in indices]
    # np.int was removed in NumPy 1.24; the builtin int gives the same
    # default integer dtype.
    table = np.zeros(shape, int)
    # Multidimensional fancy indexing requires a tuple of index arrays;
    # passing a list is no longer interpreted that way by modern NumPy.
    np.add.at(table, tuple(inverses), 1)
    return tuple(uniques), table
def add(user_id, resource_policy, admin, inactive, rate_limit):
    '''Add a new keypair.

    USER_ID: User ID of a new key pair.
    RESOURCE_POLICY: resource policy for new key pair.
    '''
    try:
        user_id = int(user_id)
    except ValueError:
        # Backend.AI v1.4+ accepts string-based user IDs as-is.
        pass
    with Session() as session:
        try:
            data = session.KeyPair.create(
                user_id,
                is_active=not inactive,
                is_admin=admin,
                resource_policy=resource_policy,
                rate_limit=rate_limit)
        except Exception as e:
            print_error(e)
            sys.exit(1)
        if not data['ok']:
            print_fail('KeyPair creation has failed: {0}'.format(data['msg']))
            sys.exit(1)
        item = data['keypair']
        print('Access Key: {0}'.format(item['access_key']))
        print('Secret Key: {0}'.format(item['secret_key']))
def _update_dict(self, initial: "JSON", other: "Mapping") -> "JSON":
    """Recursively update a dictionary.

    :param initial: Dict to update (modified in place).
    :param other: Dict to update from; nested mappings are merged
        rather than replaced wholesale.
    :return: The updated ``initial`` dict.
    """
    # collections.Mapping was removed in Python 3.10; the ABC lives in
    # collections.abc. Imported locally to keep this fix self-contained.
    from collections.abc import Mapping as _Mapping
    for key, value in other.items():
        if isinstance(value, _Mapping):
            initial[key] = self._update_dict(initial.get(key, {}), value)
        else:
            initial[key] = value
    return initial
def preloop(self):
    '''Executed before the command loop starts.'''
    here = os.path.dirname(os.path.realpath(__file__))
    help_dir = os.path.join(here, HELP_DIR_NAME)
    self.load_forth_commands(help_dir)
    self.load_shell_commands(help_dir)
def get_endpoint_w_server_list(endpoint_id):
    """A helper for handling endpoint server list lookups correctly
    accounting for various endpoint types.

    - Raises click.UsageError when used on Shares
    - Returns (<get_endpoint_response>, "S3") for S3 endpoints
    - Returns (<get_endpoint_response>, <server_list_response>) for all
      other Endpoints
    """
    client = get_client()
    endpoint = client.get_endpoint(endpoint_id)
    if endpoint["host_endpoint_id"]:
        # not GCS -- this is a share endpoint; shares have no servers
        # of their own, so point the user at the host endpoint instead.
        raise click.UsageError(dedent(u"""\
{id} ({0}) is a share and does not have servers.
To see details of the share, use
globus endpoint show {id}
To list the servers on the share's host endpoint, use
globus endpoint server list {host_endpoint_id}
""").format(display_name_or_cname(endpoint), **endpoint.data))
    if endpoint["s3_url"]:
        # not GCS -- legacy S3 endpoint type; no server list to fetch
        return (endpoint, "S3")
    else:
        return (endpoint, client.endpoint_server_list(endpoint_id))
def contains(bank, key, cachedir):
    '''Return True if the given bank (and, optionally, key) exists in
    the cache rooted at ``cachedir``.

    With ``key is None`` only the bank directory is checked; otherwise
    the key's pickle file inside the bank directory must exist.
    '''
    bank_dir = os.path.join(cachedir, os.path.normpath(bank))
    if key is None:
        return os.path.isdir(bank_dir)
    return os.path.isfile(os.path.join(bank_dir, '{0}.p'.format(key)))
def print_error_to_io_stream(err: Exception, io: TextIOBase, print_big_traceback: bool = True):
    """Print an exception's traceback and message to a text stream.

    :param err: the exception to render.
    :param io: destination text stream.
    :param print_big_traceback: when True, use the configured
        multi-error traceback limit; otherwise show only the innermost
        frame.
    :return: None
    """
    limit = -GLOBAL_CONFIG.multiple_errors_tb_limit if print_big_traceback else -1
    traceback.print_tb(err.__traceback__, file=io, limit=limit)
    io.writelines(' ' + str(err.__class__.__name__) + ' : ' + str(err))
def hashkey(*args, **kwargs):
    """Return a cache key for the specified hashable arguments."""
    if not kwargs:
        return _HashedTuple(args)
    # Flatten sorted kwargs behind the marker so keyword order never
    # affects the key.
    return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark))
def render(self, namespace):
    '''Render template lines.

    The namespace is consulted only when this template part actually
    contains format fields.
    '''
    if self._need_format:
        return self._text.format_map(namespace.dictionary)
    return self._text
def populate(self, priority, address, rtr, data):
    """Decode a 4-byte channel-status payload into LED/closed sets.

    :return: None
    """
    assert isinstance(data, bytes)
    self.needs_low_priority(priority)
    self.needs_no_rtr(rtr)
    self.needs_data(data, 4)
    self.set_attributes(priority, address, rtr)
    # One status byte per attribute, in wire order.
    channel_attrs = ('closed', 'led_on', 'led_slow_blinking', 'led_fast_blinking')
    for attr, byte in zip(channel_attrs, data):
        setattr(self, attr, self.byte_to_channels(byte))
def Column(self, column_name):
    """Iterate over values of a given column.

    Args:
        column_name: A name of the column to retrieve the values for.
    Yields:
        Values of the specified column.
    Raises:
        KeyError: If given column is not present in the table.
    """
    column_idx = next(
        (idx for idx, column in enumerate(self.header.columns)
         if column.name == column_name),
        None)
    if column_idx is None:
        raise KeyError("Column '{}' not found".format(column_name))
    for row in self.rows:
        yield row.values[column_idx]
def connect(self, successor):
    """Link this node to ``successor`` (outgoing here, ingoing there).

    Exit-connected nodes are never linked to ordinary nodes.
    """
    skip = (isinstance(self, ConnectToExitNode)
            and not isinstance(successor, EntryOrExitNode))
    if skip:
        return
    self.outgoing.append(successor)
    successor.ingoing.append(self)
def add_constraint(self, func, variables, default_values=None):
    """Register a constraint that applies to one or more variables.

    ``func`` must return True or False to indicate which combinations
    of variable values are valid.
    """
    entry = (func, variables, default_values or ())
    self._constraints.append(entry)
async def deploy(self, charm, series, application, options, constraints,
                 storage, endpoint_bindings, *args):
    """Apply a bundle "deploy" change.

    :param charm str: URL of the charm to deploy (may be an indirect
        reference resolved via ``self.resolve``).
    :param series str: series to deploy if the charm default is not
        sufficient.
    :param application str: application name.
    :param options map[string]interface{}: application options.
    :param constraints str: optional application constraints.
    :param storage map[string]string: optional storage constraints.
    :param endpoint_bindings map[string]string: optional endpoint
        bindings.
    :param args: trailing positionals whose shape depends on the Juju
        version -- ``(resources,)`` on Juju 2.4 and below;
        ``(devices, resources, num_units, ...)`` on Juju 2.5+ (any
        placement entry past the first three is ignored).
    :return: the application name.
    """
    # resolve indirect references
    charm = self.resolve(charm)
    if len(args) == 1:
        # Juju 2.4 and below only sends the resources
        resources = args[0]
        devices, num_units = None, None
    else:
        # Juju 2.5+ sends devices before resources, as well as num_units
        # There might be placement but we need to ignore that.
        devices, resources, num_units = args[:3]
    if not charm.startswith('local:'):
        # Store charms may need store-side resource revisions resolved.
        resources = await self.model._add_store_resources(
            application, charm, overrides=resources)
    await self.model._deploy(
        charm_url=charm,
        application=application,
        series=series,
        config=options,
        constraints=constraints,
        endpoint_bindings=endpoint_bindings,
        resources=resources,
        storage=storage,
        devices=devices,
        num_units=num_units,
    )
    return application
def _send_locked(self, cmd):
    """Send ``cmd`` to the lutron controller.

    Assumes self._lock is held. A broken pipe triggers a locked
    disconnect instead of propagating.
    """
    # Lazy %-args: the string is only formatted if debug logging is on.
    _LOGGER.debug("Sending: %s", cmd)
    try:
        self._telnet.write(cmd.encode('ascii') + b'\r\n')
    except BrokenPipeError:
        self._disconnect_locked()
def validate_email(self, email_address):
    '''A method to validate an email address.

    :param email_address: string with email address to validate
    :return: dictionary with validation fields in response_details['json']
    '''
    title = '%s.validate_email' % __class__.__name__
    # validate inputs
    object_title = '%s(email_address="")' % title
    email_address = self.fields.validate(
        email_address, '.email_address', object_title)
    # construct and send the validation request
    request_kwargs = {
        'url': '%s/address/validate' % self.api_endpoint,
        'params': {'address': email_address},
    }
    return self._get_request(**request_kwargs)
def updateFile(cls, file_, url):
    """Check the local file against the remote URL and update on demand.

    Args:
        file_: str. Local filename. Normally it's __file__
        url: str. Remote url of raw file content. Normally it's
            https://raw.github.com/...
    Returns:
        bool: file updated or not
    """
    if not url or not file_:
        return False
    try:
        remote_codes = urllib.request.urlopen(url).read()
        with open(file_, 'rb') as f:
            local_codes = f.read().replace(b'\r', b'')
        if local_codes == remote_codes:
            cit.info("{} is already up-to-date.".format(file_))
            return False
        diff = len(remote_codes) - len(local_codes)
        cit.ask("A new version is available. Update? (Diff: {})".format(diff))
        if cit.get_choice(['Yes', 'No']) != 'Yes':
            cit.warn("Update Canceled")
            return False
        with open(file_, 'wb') as f:
            f.write(remote_codes)
        cit.info("Update Success.")
        return True
    except Exception as e:
        cit.err("{f} update failed: {e}".format(f=file_, e=e))
        return False
def named_function(name):
    """Resolve a fully dotted name to a module-global function.

    If the target was wrapped by a decorator that keeps a reference in
    ``original_func``, the original unwrapped function is returned.
    """
    module_path, _, func_name = name.rpartition('.')
    module = named_object(module_path)
    func = getattr(module, func_name)
    return getattr(func, 'original_func', func)
def _add_ssh_key(ret):
    '''Set up the salt-ssh minion to be accessed with the salt-ssh
    default key.'''
    home_key = os.path.expanduser('~/.ssh/id_rsa')
    if __opts__.get('ssh_use_home_key') and os.path.isfile(home_key):
        priv = home_key
    else:
        default_key = os.path.abspath(
            os.path.join(__opts__['pki_dir'], 'ssh', 'salt-ssh.rsa'))
        priv = __opts__.get('ssh_priv', default_key)
    if priv and os.path.isfile(priv):
        ret['priv'] = priv
def _list_request(self):
    """Return a dict keyed by JMX domain names (empty dict on error)."""
    try:
        # https://jolokia.org/reference/html/protocol.html
        # maxDepth=1 restricts the return value to a map with the JMX
        # domains as keys (the values are dummies). maxCollectionSize=0
        # means "unlimited", working around pre-1.3 Jolokia truncating
        # results at 1000 entries.
        url = "http://%s:%s/%s%s?maxDepth=1&maxCollectionSize=0" % (
            self.config['host'], self.config['port'],
            self.jolokia_path, self.LIST_URL)
        # Leave part of the interval for processing the downloaded
        # metrics, hence a timeout below the collection interval.
        timeout = max(2, float(self.config['interval']) * 2 / 3)
        with closing(urllib2.urlopen(self._create_request(url), timeout=timeout)) as response:
            return self._read_json(response)
    except (urllib2.HTTPError, ValueError) as e:
        self.log.error('Unable to read JSON response: %s', str(e))
        return {}
def create(self, unique_name, default_ttl=values.unset, callback_url=values.unset, geo_match_level=values.unset, number_selection_behavior=values.unset, intercept_callback_url=values.unset, out_of_session_callback_url=values.unset, chat_instance_sid=values.unset):
    """Create a new ServiceInstance.

    :param unicode unique_name: An application-defined string that
        uniquely identifies the resource
    :param unicode default_ttl: Default TTL for a Session, in seconds
    :param unicode callback_url: The URL we should call when the
        interaction status changes
    :param ServiceInstance.GeoMatchLevel geo_match_level: Where a proxy
        number must be located relative to the participant identifier
    :param ServiceInstance.NumberSelectionBehavior number_selection_behavior:
        The preference for Proxy Number selection for the Service instance
    :param unicode intercept_callback_url: The URL we call on each interaction
    :param unicode out_of_session_callback_url: The URL we call when an
        inbound call or SMS action occurs on a closed or non-existent Session
    :param unicode chat_instance_sid: The SID of the Chat Service Instance
    :returns: Newly created ServiceInstance
    :rtype: twilio.rest.proxy.v1.service.ServiceInstance
    """
    fields = {
        'UniqueName': unique_name,
        'DefaultTtl': default_ttl,
        'CallbackUrl': callback_url,
        'GeoMatchLevel': geo_match_level,
        'NumberSelectionBehavior': number_selection_behavior,
        'InterceptCallbackUrl': intercept_callback_url,
        'OutOfSessionCallbackUrl': out_of_session_callback_url,
        'ChatInstanceSid': chat_instance_sid,
    }
    data = values.of(fields)
    payload = self._version.create('POST', self._uri, data=data)
    return ServiceInstance(self._version, payload)
def connect(self, forceReconnect=False):
    """Check current conditions and initiate connection if possible.

    This is called to check preconditions for starting a new connection,
    and initiating the connection itself. If the service is not running,
    this will do nothing.

    @param forceReconnect: Drop an existing connection to reconnect.
    @type forceReconnect: C{False}
    @raises L{ConnectError}: When a connection (attempt) is already in
        progress, unless C{forceReconnect} is set.
    @raises L{NoConsumerError}: When there is no consumer for incoming
        tweets. No further connection attempts will be made, unless
        L{connect} is called again.
    """
    if self._state == 'stopped':
        raise Error("This service is not running. Not connecting.")
    if self._state == 'connected':
        if forceReconnect:
            # Drop the live connection; reconnection follows disconnect.
            self._toState('disconnecting')
            return True
        else:
            raise ConnectError("Already connected.")
    elif self._state == 'aborting':
        raise ConnectError("Aborting connection in progress.")
    elif self._state == 'disconnecting':
        raise ConnectError("Disconnect in progress.")
    elif self._state == 'connecting':
        if forceReconnect:
            self._toState('aborting')
            return True
        else:
            raise ConnectError("Connect in progress.")
    if self.delegate is None:
        # Nobody to consume tweets: settle in idle and give up.
        if self._state != 'idle':
            self._toState('idle')
        raise NoConsumerError()
    if self._state == 'waiting':
        if self._reconnectDelayedCall.called:
            # The scheduled reconnect already fired; clear it and fall
            # through to a fresh connect below.
            self._reconnectDelayedCall = None
            pass
        else:
            # Pull the scheduled reconnect forward to fire immediately.
            self._reconnectDelayedCall.reset(0)
            return True
    self._toState('connecting')
    return True
def _safe_string(self, source, encoding='utf-8'):
    """Convert unicode to string as gnomekeyring barfs on unicode."""
    if isinstance(source, str):
        return str(source)
    return source.encode(encoding)
def view_fields(self, *attributes, **options):
    """Return an :class:`~collections.OrderedDict` of the selected field
    attributes for each member nested in the `Structure`.

    Yields ``{'member name': field attribute}`` pairs, or
    ``{'member name': dict(field attributes)}`` when more than one
    attribute is selected. Containers nested in the `Structure` are
    viewed recursively (chained method call).

    :param str attributes: selected :class:`Field` attributes.
        Fallback is the field :attr:`~Field.value`.
    :keyword tuple fieldnames: sequence of dictionary keys for the
        selected field *attributes*. Defaults to ``(*attributes)``.
    :keyword bool nested: if ``True`` all :class:`Pointer` fields nested
        in the `Structure` view their referenced :attr:`~Pointer.data`
        object field attributes as well (chained method call).
    """
    members = OrderedDict()
    for name, item in self.items():
        # Container: recurse into its own members.
        if is_container(item):
            members[name] = item.view_fields(*attributes, **options)
        # Pointer: follow the referenced data only when nested=True.
        elif is_pointer(item) and get_nested(options):
            members[name] = item.view_fields(*attributes, **options)
        # Field: extract the requested attribute(s).
        elif is_field(item):
            if attributes:
                field_getter = attrgetter(*attributes)
            else:
                field_getter = attrgetter('value')
            if len(attributes) > 1:
                # Multiple attributes: label them with the caller's
                # fieldnames (defaults to the attribute names).
                fieldnames = options.get('fieldnames', attributes)
                members[name] = dict(zip(fieldnames, field_getter(item)))
            else:
                members[name] = field_getter(item)
    return members
def target_data(self):
    """The TargetData for this execution engine (created lazily, cached)."""
    if self._td is None:
        ptr = ffi.lib.LLVMPY_GetExecutionEngineTargetData(self)
        self._td = targets.TargetData(ptr)
        self._td._owned = True
    return self._td
def packet_read(self):
    """Read (part of) an MQTT packet from the network.

    Incremental, non-blocking reader: fixed-header command byte first,
    then the variable-length "remaining length" field, then the payload.
    On EAGAIN/EWOULDBLOCK it returns success and resumes on the next
    call via the partially filled ``self.in_packet`` state.

    Returns a ``(error_code, bytes_received)`` tuple, except for the
    no-connection case which returns only ``NC.ERR_NO_CONN``.
    """
    bytes_received = 0
    if self.sock == NC.INVALID_SOCKET:
        return NC.ERR_NO_CONN
    # Step 1: the command byte (first byte of the fixed header).
    if self.in_packet.command == 0:
        ba_data, errnum, errmsg = nyamuk_net.read(self.sock, 1)
        if errnum == 0 and len(ba_data) == 1:
            bytes_received += 1
            byte = ba_data[0]
            self.in_packet.command = byte
            if self.as_broker:
                # The first packet from a brand-new client must be CONNECT.
                if self.bridge is None and self.state == NC.CS_NEW and (byte & 0xF0) != NC.CMD_CONNECT:
                    print "RETURN ERR_PROTOCOL"
                    return NC.ERR_PROTOCOL, bytes_received
        else:
            if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:
                # Nothing available yet; try again later.
                return NC.ERR_SUCCESS, bytes_received
            elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:
                return NC.ERR_CONN_LOST, bytes_received
            else:
                evt = event.EventNeterr(errnum, errmsg)
                self.push_event(evt)
                return NC.ERR_UNKNOWN, bytes_received
    # Step 2: the "remaining length" field, a base-128 varint of at
    # most 4 bytes (MSB of each byte is the continuation flag).
    if not self.in_packet.have_remaining:
        loop_flag = True
        while loop_flag:
            ba_data, errnum, errmsg = nyamuk_net.read(self.sock, 1)
            if errnum == 0 and len(ba_data) == 1:
                byte = ba_data[0]
                bytes_received += 1
                self.in_packet.remaining_count += 1
                if self.in_packet.remaining_count > 4:
                    # MQTT allows at most 4 length bytes.
                    return NC.ERR_PROTOCOL, bytes_received
                self.in_packet.remaining_length += (byte & 127) * self.in_packet.remaining_mult
                self.in_packet.remaining_mult *= 128
            else:
                if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:
                    return NC.ERR_SUCCESS, bytes_received
                elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:
                    return NC.ERR_CONN_LOST, bytes_received
                else:
                    evt = event.EventNeterr(errnum, errmsg)
                    self.push_event(evt)
                    return NC.ERR_UNKNOWN, bytes_received
            if (byte & 128) == 0:
                # Continuation bit clear: length field complete.
                loop_flag = False
        if self.in_packet.remaining_length > 0:
            self.in_packet.payload = bytearray(self.in_packet.remaining_length)
            if self.in_packet.payload is None:
                return NC.ERR_NO_MEM, bytes_received
            self.in_packet.to_process = self.in_packet.remaining_length
        self.in_packet.have_remaining = True
    # Step 3: the payload, read in as large a chunk as is available.
    if self.in_packet.to_process > 0:
        ba_data, errnum, errmsg = nyamuk_net.read(self.sock, self.in_packet.to_process)
        if errnum == 0 and len(ba_data) > 0:
            readlen = len(ba_data)
            bytes_received += readlen
            for idx in xrange(0, readlen):
                self.in_packet.payload[self.in_packet.pos] = ba_data[idx]
                self.in_packet.pos += 1
                self.in_packet.to_process -= 1
        else:
            if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:
                return NC.ERR_SUCCESS, bytes_received
            elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:
                return NC.ERR_CONN_LOST, bytes_received
            else:
                evt = event.EventNeterr(errnum, errmsg)
                self.push_event(evt)
                return NC.ERR_UNKNOWN, bytes_received
    # all data for this packet is read
    self.in_packet.pos = 0
    ret = self.packet_handle()
    self.in_packet.packet_cleanup()
    self.last_msg_in = time.time()
    return ret, bytes_received
def send_image(self, number, path, caption=None):
    """Send an image message.

    :param str number: phone number with cc (country code)
    :param str path: image file path
    :param caption: optional caption text
    """
    media_type = RequestUploadIqProtocolEntity.MEDIA_TYPE_IMAGE
    return self._send_media_path(number, path, media_type, caption)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.