signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def create_sdist(self, tag):
    """Create an sdist and return the full file path of the .tar.gz.

    Checks out ``tag`` from version control, runs ``setup.py sdist`` with
    the current interpreter, and locates the resulting tarball.

    Side effects: ``checkout_from_tag()`` chdirs into a temp directory,
    which is remembered on ``self.temp_tagdir`` for later cleanup.

    :param tag: VCS tag to build the source distribution from
    :returns: full file path of the generated tarball
    """
    logger.info("Making tempdir for %s with tag %s...", self.package, tag)
    self.wrapper.vcs.checkout_from_tag(tag)
    # checkout_from_tag() chdirs to a temp directory that we need to clean up
    # later.
    self.temp_tagdir = os.path.realpath(os.getcwd())
    logger.debug("Tag checkout placed in %s", self.temp_tagdir)
    # Use the same interpreter that is running this code to build the sdist.
    python = sys.executable
    logger.debug(command("%s setup.py sdist" % python))
    tarball = find_tarball(self.temp_tagdir, self.package, tag)
    return tarball
def update_image_location(self, timeline_json):
    """Update the image location from a timeline event.

    :param timeline_json: timeline event payload; either a dict or a
        list/tuple of dicts (in which case the first -- newest -- is used)
    :return: True when ``self._image_url`` was updated, False on empty input
    :raise AbodeException: when the event is not a CAPTURE IMAGE event,
        carries no file path, or the image request does not redirect
    """
    if not timeline_json:
        return False
    # If we get a list of objects back (likely)
    # then we just want the first one as it should be the "newest"
    if isinstance(timeline_json, (tuple, list)):
        timeline_json = timeline_json[0]
    # Verify that the event code is of the "CAPTURE IMAGE" event
    event_code = timeline_json.get('event_code')
    if event_code != TIMELINE.CAPTURE_IMAGE['event_code']:
        raise AbodeException(ERROR.CAM_TIMELINE_EVENT_INVALID)
    # The timeline response has an entry for "file_path" that acts as the
    # location of the image within the Abode servers.
    file_path = timeline_json.get('file_path')
    if not file_path:
        raise AbodeException(ERROR.CAM_IMAGE_REFRESH_NO_FILE)
    # Perform a "head" request for the image and look for a
    # 302 Found response
    url = CONST.BASE_URL + file_path
    response = self._abode.send_request("head", url)
    if response.status_code != 302:
        # Fixed typo in the log message ("Unexected" -> "Unexpected").
        _LOGGER.warning("Unexpected response code %s with body: %s",
                        str(response.status_code), response.text)
        raise AbodeException(ERROR.CAM_IMAGE_UNEXPECTED_RESPONSE)
    # The response should have a location header that is the actual
    # location of the image stored on AWS
    location = response.headers.get('location')
    if not location:
        raise AbodeException(ERROR.CAM_IMAGE_NO_LOCATION_HEADER)
    self._image_url = location
    return True
def get_image(self):
    """Get the image currently being displayed.

    Returns
    -------
    image : `~ginga.AstroImage.AstroImage` or `~ginga.RGBImage.RGBImage`
        Image object.
    """
    # Fast path: a cached image object saves a canvas lookup.
    holder = self._imgobj if self._imgobj is not None else self.get_canvas_image()
    return holder.get_image()
def to_identifier(string):
    """Makes a python identifier (perhaps an ugly one) out of any string.

    This isn't an isomorphic change, the original name can't be recovered
    from the change in all cases, so it must be stored separately.

    Examples:
        >>> to_identifier("Alice's Restaurant") -> 'Alice_s_Restaurant'
        >>> to_identifier('#if') -> 'if' -> QuiltException
        >>> to_identifier('9foo') -> 'n9foo'

    :param string: string to convert
    :returns: `string`, converted to python identifier if needed
    :rtype: string
    """
    # Replace every character that can't appear in an identifier with "_",
    # then drop leading/trailing underscores (an all-underscore name is a
    # pretty useless translation and should raise below).
    candidate = re.sub(r'[^0-9a-zA-Z_]', '_', string).strip('_')
    # Identifiers can't start with a digit; prefix with a letter.
    if candidate and candidate[0].isdigit():
        candidate = "n" + candidate
    if not is_identifier(candidate):
        raise QuiltException(
            "Unable to generate Python identifier from name: {!r}".format(string))
    return candidate
def OnTextFont(self, event):
    """Text font choice event handler.

    Reads the chosen font name from the combobox that emitted the event
    and broadcasts it via a FontMsg command event.
    """
    fontchoice_combobox = event.GetEventObject()
    idx = event.GetInt()
    try:
        # Combobox sources expose GetString(idx) for the selected entry.
        font_string = fontchoice_combobox.GetString(idx)
    except AttributeError:
        # Fall back to the string carried by the event itself.
        font_string = event.GetString()
    post_command_event(self, self.FontMsg, font=font_string)
def visit_extslice(self, node, parent):
    """visit an ExtSlice node by returning a fresh instance of it"""
    fresh = nodes.ExtSlice(parent=parent)
    # Visit each dimension with the new node as its parent.
    dims = [self.visit(dim, fresh) for dim in node.dims]
    fresh.postinit(dims)
    return fresh
def team_matches(self, team, event=None, year=None, simple=False, keys=False):
    """Get list of matches team has participated in.

    :param team: Team to get matches of.
    :param year: Year to get matches from.
    :param event: Event to get matches from.
    :param simple: Get only vital data.
    :param keys: Only get match keys rather than their full data.
    :return: List of string keys or Match objects.

    NOTE(review): if neither ``event`` nor ``year`` is given the function
    falls through and implicitly returns None — confirm this is intended.
    """
    if event:
        if keys:
            return self._get('team/%s/event/%s/matches/keys' % (self.team_key(team), event))
        else:
            # '/simple' suffix selects the reduced payload variant.
            return [Match(raw) for raw in self._get('team/%s/event/%s/matches%s' % (self.team_key(team), event, '/simple' if simple else ''))]
    elif year:
        if keys:
            return self._get('team/%s/matches/%s/keys' % (self.team_key(team), year))
        else:
            return [Match(raw) for raw in self._get('team/%s/matches/%s%s' % (self.team_key(team), year, '/simple' if simple else ''))]
def start(self):
    """Start the daemon.

    Refuses to start (writes to stderr and exits with status 1) when the
    pidfile exists and the recorded process is still alive.
    """
    # Check for a pidfile to see if the daemon already runs.
    pid = None
    try:
        # The original used the Python-2-only ``file()`` builtin; ``open()``
        # works on both Python 2 and 3, and the context manager guarantees
        # the handle is closed even if parsing fails.
        with open(self.pidfile, 'r') as pf:
            pid = int(pf.read().strip())
        # Signal 0 probes for process existence without sending a signal;
        # raises OSError when no such process exists.
        os.kill(pid, 0)
    except IOError:
        # No pidfile (or unreadable): no daemon running.
        pid = None
    except OSError:
        # Stale pidfile: recorded process is gone.
        pid = None
    if pid:
        message = "pidfile %s already exist. Daemon already running?\n"
        sys.stderr.write(message % self.pidfile)
        sys.exit(1)
    # Start the daemon
    self._daemonize()
def _zforce ( self , R , z , phi = 0. , t = 0. ) :
"""NAME :
_ zforce
PURPOSE :
evaluate the vertical force for this potential
INPUT :
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT :
the vertical force""" | if not self . isNonAxi :
phi = 0.
self . _compute_xyzforces ( R , z , phi , t )
return self . _cached_Fz |
def get_asset_lookup_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the asset lookup service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.repository.AssetLookupSession) - an
            ``AssetLookupSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_lookup()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_asset_lookup()`` is ``true``.*
    """
    # Delegate to the provider manager and wrap the returned session so
    # the local configuration map travels with it.
    return AssetLookupSession(
        self._provider_manager.get_asset_lookup_session(proxy),
        self._config_map)
def goHome(self):
    """Goes to the home url.

    If there is no home url specifically set, then this will go to the
    first url in the history.  Otherwise, it will look to see if the home
    url is in the stack and go to that level; if the home url is not
    found, then it will be pushed to the top of the stack using the push
    method.
    """
    # Nothing to navigate to when there is no history behind us.
    if not self.canGoBack():
        return ''
    if self.homeUrl():
        # Push the configured home url onto the stack.
        self.push(self.homeUrl())
    # Jump to the bottom of the stack without emitting stack signals.
    # NOTE(review): jumping to index 0 right after push() looks suspicious
    # given the docstring ("go to that level") — confirm intended order.
    self._blockStack = True
    self._index = 0
    self.emitCurrentChanged()
    self._blockStack = False
    return self.currentUrl()
def left_node_intersection_join(g, h):
    """Take the intersection over two graphs.

    This intersection of two graphs is defined by the union of the
    sub-graphs induced over the intersection of their nodes.

    :param BELGraph g: A BEL graph
    :param BELGraph h: A BEL graph
    :rtype: BELGraph

    Example usage:

    >>> import pybel
    >>> g = pybel.from_path('...')
    >>> h = pybel.from_path('...')
    >>> merged = left_node_intersection_join(g, h)
    """
    # Nodes present in both graphs.
    shared_nodes = set(g) & set(h)
    left = subgraph(g, shared_nodes)
    right = subgraph(h, shared_nodes)
    # Merge the right induced subgraph into the left one in place.
    left_full_join(left, right)
    return left
def assert_dimensionless(value):
    """Tests for dimensionlessness of input.

    If input is dimensionless but expressed as a Quantity, it returns the
    bare value.  If it is not, it raises an error.

    :param value: plain number or ``quantities.Quantity``
    :returns: the bare numeric value
    :raises TypeError: when ``value`` carries physical dimensions
    """
    if isinstance(value, Quantity):
        # Reduce to base SI units so compound-but-cancelling units pass.
        value = value.simplified
        if value.dimensionality == Dimensionality({}):
            # Unwrap the plain scalar from the Quantity wrapper.
            value = value.base.item()
        else:
            raise TypeError("Score value %s must be dimensionless" % value)
    return value
def save(self):
    """Saves the changes from the ui to this widget's record instance.

    Emits aboutToSave signals, diffs the edited values against the stored
    record, validates, writes the values back, and optionally commits.

    :return: True on success (or when nothing changed), False on any
             validation or commit failure.
    """
    record = self.record()
    if not record:
        logger.warning('No record has been defined for %s.' % self)
        return False
    if not self.signalsBlocked():
        self.aboutToSaveRecord.emit(record)
        self.aboutToSave.emit()
    values = self.saveValues()
    # ignore columns that are the same (fixes bugs in encrypted columns)
    check = values.copy()
    # NOTE(review): popping from `check` while iterating check.items() is a
    # RuntimeError on Python 3 (fine on Python 2 where items() is a list) —
    # confirm the target interpreter or iterate a snapshot.
    for column_name, value in check.items():
        try:
            equals = value == record.recordValue(column_name)
        except UnicodeWarning:
            # Comparison on undecodable data: treat as changed.
            equals = False
        if equals:
            check.pop(column_name)
    # check to see if nothing has changed
    if not check and record.isRecord():
        if not self.signalsBlocked():
            self.recordSaved.emit(record)
            self.saved.emit()
            self._saveSignalBlocked = False
        else:
            self._saveSignalBlocked = True
        if self.autoCommitOnSave():
            status, result = record.commit()
            if status == 'errored':
                if 'db_error' in result:
                    msg = nativestring(result['db_error'])
                else:
                    msg = 'An unknown database error has occurred.'
                QMessageBox.information(self, 'Commit Error', msg)
                return False
        return True
    # validate the modified values
    success, msg = record.validateValues(check)
    if (not success):
        QMessageBox.information(None, 'Could Not Save', msg)
        return False
    record.setRecordValues(**values)
    success, msg = record.validateRecord()
    if not success:
        QMessageBox.information(None, 'Could Not Save', msg)
        return False
    if (self.autoCommitOnSave()):
        result = record.commit()
        # NOTE(review): the earlier commit() call unpacks a (status, result)
        # tuple, while here the return is tested with `'errored' in result` —
        # these two handlings look inconsistent; confirm commit()'s contract.
        if 'errored' in result:
            QMessageBox.information(None, 'Could Not Save', msg)
            return False
    if (not self.signalsBlocked()):
        self.recordSaved.emit(record)
        self.saved.emit()
        self._saveSignalBlocked = False
    else:
        self._saveSignalBlocked = True
    return True
def intersect(a, b):
    """Check if two rectangles intersect"""
    # Degenerate (zero width or zero height) rectangles never intersect.
    for rect in (a, b):
        if rect[x0] == rect[x1] or rect[y0] == rect[y1]:
            return False
    # Rectangles intersect iff their projections overlap on both axes
    # (closed intervals: touching edges count as intersecting).
    overlap_x = a[x0] <= b[x1] and b[x0] <= a[x1]
    overlap_y = a[y0] <= b[y1] and b[y0] <= a[y1]
    return overlap_x and overlap_y
def next(self):
    """Next point in iteration.

    Pulls the next (x, y) point from ``self.scan`` and rotates it by
    ``self.angle`` radians about the origin.

    :returns: rotated (x, y) tuple
    """
    x, y = next(self.scan)
    ca, sa = math.cos(self.angle), math.sin(self.angle)
    # Standard 2-D rotation matrix applied to (x, y).
    xr = ca * x - sa * y
    yr = sa * x + ca * y
    return xr, yr
def parse_resultsline(self, line):
    """CSV Parser.

    Parses one comma-separated results line (once the header section has
    ended), builds a raw-result dict keyed by the cleaned element name,
    and registers it via ``self._addRawResult``.
    """
    splitted = [token.strip() for token in line.split(',')]
    if self._end_header:
        # First column is the result/sample id.
        resid = splitted[0]
        rawdict = {'DefaultResult': 'Soln Conc'}
        # Pair header names with this line's values.
        rawdict.update(dict(zip(self._resultsheader, splitted)))
        # assumes rawdict has 'Date' and 'Time' keys from the header —
        # TODO confirm against the instrument export format.
        date_string = "{Date} {Time}".format(**rawdict)
        date_time = DateTime(date_string)
        rawdict['DateTime'] = date_time
        element = rawdict.get("Element", "").replace(" ", "").replace(".", "")
        # Set DefaultResult to 0.0 if result is "0" or "--" or '' or 'ND'
        result = rawdict[rawdict['DefaultResult']]
        column_name = rawdict['DefaultResult']
        result = self.get_result(column_name, result, line)
        rawdict[rawdict['DefaultResult']] = result
        # Strip any remaining non-word characters from the element key.
        val = re.sub(r"\W", "", element)
        self._addRawResult(resid, values={val: rawdict}, override=False)
        # NOTE(review): this summary log runs for every parsed line, not
        # only at end of file — confirm placement.
        self.log("End of file reached successfully: ${total_objects} objects, " "${total_analyses} analyses, ${total_results} results", mapping={"total_objects": self.getObjectsTotalCount(), "total_analyses": self.getAnalysesTotalCount(), "total_results": self.getResultsTotalCount()})
def run_in_parallel(programs, nsamples, cxn, shuffle=True):
    """Take sequences of Protoquil programs on disjoint qubits and execute a single sequence of
    programs that executes the input programs in parallel. Optionally randomize within each
    qubit-specific sequence.

    The programs are passed as a 2d array of Quil programs, where the (first) outer axis iterates
    over disjoint sets of qubits that the programs involve and the inner axis iterates over a
    sequence of related programs, e.g., tomography sequences, on the same set of qubits.

    :param Union[np.ndarray, List[List[Program]]] programs: A rectangular list of lists, or a 2d
        array of Quil Programs. The outer list iterates over disjoint qubit groups as targets, the
        inner list over programs to run on those qubits, e.g., tomographic sequences.
    :param int nsamples: Number of repetitions for executing each Program.
    :param QPUConnection|QVMConnection cxn: The quantum machine connection.
    :param bool shuffle: If True, the order of each qubit specific sequence (2nd axis) is
        randomized. Default is True.
    :return: An array of 2d arrays that provide bitstring histograms for each input program.
        The axis of the outer array iterates over the disjoint qubit groups, the outer axis of the
        inner 2d array iterates over the programs for that group and the inner most axis iterates
        over all possible bitstrings for the qubit group under consideration.
    :rtype np.array
    """
    if shuffle:
        # assumes `programs` is rectangular (all groups same length) —
        # TODO confirm; len(programs[0]) is used for every row.
        n_groups = len(programs)
        n_progs_per_group = len(programs[0])
        # One identity permutation per qubit group, shuffled independently.
        permutations = np.outer(np.ones(n_groups, dtype=int),
                                np.arange(n_progs_per_group, dtype=int))
        inverse_permutations = np.zeros_like(permutations)
        for jj in range(n_groups):
            # in-place operation
            np.random.shuffle(permutations[jj])
            # store inverse permutation so results can be un-shuffled later
            inverse_permutations[jj] = np.argsort(permutations[jj])
        # apply the per-group permutations to the programs
        shuffled_programs = np.empty((n_groups, n_progs_per_group), dtype=object)
        for jdx, (progsj, pj) in enumerate(zip(programs, permutations)):
            shuffled_programs[jdx] = [progsj[pjk] for pjk in pj]
        shuffled_results = _run_in_parallel(shuffled_programs, nsamples, cxn)
        # reverse shuffling of results via the stored inverse permutations
        results = np.array([resultsj[pj] for resultsj, pj
                            in zip(shuffled_results, inverse_permutations)])
        return results
    else:
        return _run_in_parallel(programs, nsamples, cxn)
def recCopyElement(oldelement):
    """Generates a copy of an xml element and recursively of all
    child elements.

    :param oldelement: an instance of lxml.etree._Element
    :returns: a copy of the "oldelement"

    .. warning::
        doesn't copy ``.text`` or ``.tail`` of xml elements
    """
    newelement = ETREE.Element(oldelement.tag, oldelement.attrib)
    # getchildren() is deprecated (and removed from ElementTree in
    # Python 3.9); iterating the element directly yields its children.
    for childelement in oldelement:
        newelement.append(recCopyElement(childelement))
    return newelement
def bowtie2_general_stats_table(self):
    """Take the parsed stats from the Bowtie 2 report and add it to the
    basic stats table at the top of the report"""
    # Single column: the overall alignment rate, rendered as a percentage.
    headers = OrderedDict()
    headers['overall_alignment_rate'] = {
        'title': '% Aligned',
        'description': 'overall alignment rate',
        'max': 100,
        'min': 0,
        'suffix': '%',
        'scale': 'YlGn',
    }
    self.general_stats_addcols(self.bowtie2_data, headers)
def open(self, image, use_seek=False):
    """Opens the file-like object of the given ``image``.

    Returned file-like object guarantees:

    - context manager protocol
    - :class:`collections.abc.Iterable` protocol
    - :class:`collections.abc.Iterator` protocol
    - :meth:`~io.RawIOBase.read()` method
    - :meth:`~io.IOBase.readline()` method
    - :meth:`~io.IOBase.readlines()` method

    To sum up: you definitely can read the file, in :keyword:`with`
    statement and :keyword:`for` loop.

    Plus, if ``use_seek`` option is :const:`True`:

    - :meth:`~io.IOBase.seek()` method
    - :meth:`~io.IOBase.tell()` method

    For example, if you want to make a local copy of the image::

        import shutil

        with store.open(image) as src:
            with open(filename, 'wb') as dst:
                shutil.copyfileobj(src, dst)

    :param image: the image to get its file
    :type image: :class:`sqlalchemy_imageattach.entity.Image`
    :param use_seek: whether the file should be seekable.
                     if :const:`True` it may be buffered in the memory.
                     default is :const:`False`
    :type use_seek: :class:`bool`
    :returns: the file-like object of the image, which is a context
              manager (plus, also seekable only if ``use_seek``
              is :const:`True`)
    :rtype: :class:`file`, :class:`~sqlalchemy_imageattach.file.FileProxy`,
            file-like object
    :raise IOError: when such file doesn't exist
    """
    # Imported here to avoid a circular import at module load time.
    from .entity import Image
    if not isinstance(image, Image):
        raise TypeError('image must be a sqlalchemy_imageattach.entity.' 'Image instance, not ' + repr(image))
    elif image.object_id is None:
        raise TypeError('image.object_id must be set; it is currently ' 'None however')
    elif not isinstance(image.object_id, numbers.Integral):
        raise TypeError('image.object_id must be integer, not ' + repr(image.object_id))
    f = self.get_file(image.object_type, image.object_id, image.width, image.height, image.mimetype)
    # Enforce the minimal read contract on whatever the backend returned.
    for method in 'read', 'readline', 'readlines':
        if not callable(getattr(f, method, None)):
            raise TypeError('{0!r}.get_file() must return file-like object which ' 'has {1}() method, not {2!r}'.format(self, method, f))
    # Does the backend object already support the context manager protocol?
    ctxt = (callable(getattr(f, '__enter__', None)) and callable(getattr(f, '__exit__', None)))
    if use_seek:
        if not callable(getattr(f, 'seek', None)):
            # Not seekable: buffer the whole file into memory.
            f2 = io.BytesIO()
            shutil.copyfileobj(f, f2)
            f2.seek(0)
            return f2
        if ctxt:
            return f
        # Seekable but no context manager support: wrap it.
        return SeekableFileProxy(f)
    if ctxt:
        return f
    # Plain wrapper that only adds context-manager/iteration support.
    return FileProxy(f)
def download_parallel(url, directory, idx, min_file_size=0, max_file_size=-1, no_redirects=False, pos=0, mode='s'):
    """download function to download parallely.

    Streams ``url`` into ``directory`` using slot ``idx`` of the shared
    progress bookkeeping globals; skips files outside the configured size
    bounds.  Always increments the shared ``exit_flag`` on return.
    """
    global main_it
    global exit_flag
    global total_chunks
    global file_name
    global i_max
    # Derive the local file name from the last path segment of the url.
    file_name[idx] = url.split('/')[-1]
    file_address = directory + '/' + file_name[idx]
    is_redirects = not no_redirects
    resp = s.get(url, stream=True, allow_redirects=is_redirects)
    if not resp.status_code == 200:
        # ignore this file since server returns invalid response
        exit_flag += 1
        return
    try:
        total_size = int(resp.headers['content-length'])
    except KeyError:
        # No content-length header: fall back to the body length.
        total_size = len(resp.content)
    # NOTE(review): this is a (float) chunk count, yet below it is compared
    # against min_file_size / max_file_size — confirm the units match.
    total_chunks[idx] = total_size / chunk_size
    if total_chunks[idx] < min_file_size:
        # ignore this file since file size is lesser than min_file_size
        exit_flag += 1
        return
    elif max_file_size != -1 and total_chunks[idx] > max_file_size:
        # ignore this file since file size is greater than max_file_size
        exit_flag += 1
        return
    file_iterable = resp.iter_content(chunk_size=chunk_size)
    with open(file_address, 'wb') as f:
        for sno, data in enumerate(file_iterable):
            # Track progress for the UI thread (1-based chunk index).
            i_max[idx] = sno + 1
            f.write(data)
    exit_flag += 1
def MS(rads: float) -> ops.XXPowGate:
    """The Mølmer–Sørensen gate, a native two-qubit operation in ion traps.

    A rotation around the XX axis in the two-qubit bloch sphere.

    The gate implements the following unitary:

        exp(-i t XX) = [ cos(t)   0        0       -isin(t)]
                       [ 0        cos(t)  -isin(t)  0      ]
                       [ 0       -isin(t)  cos(t)   0      ]
                       [-isin(t)  0        0        cos(t) ]

    Args:
        rads: The rotation angle in radians.

    Returns:
        Mølmer–Sørensen gate rotating by the desired amount.
    """
    # XXPowGate's exponent is in half-turns; rads * 2 / pi converts radians
    # to that convention, and global_shift=-0.5 removes the global phase.
    return ops.XXPowGate(exponent=rads * 2 / np.pi, global_shift=-0.5)
def create_user(cls, username, password, name, email):
    """utility class method to create a user.

    Posts the given credentials to the users service.

    :raises errors.ResourceError: on any non-201 response.

    NOTE(review): the response body (the created user) is discarded;
    callers only learn of failure via the exception — confirm intended.
    """
    config = Config()
    payload = {"username": username, "email": email, "name": name, "password": password, }
    user_creation_resp = requests.post("https://users.{}/users/".format(config.host), json=payload)
    if user_creation_resp.status_code != 201:
        raise errors.ResourceError("couldnt create user")
def poll_open_file_languages(self):
    """Get list of current opened files' languages.

    Returns a set of lower-cased language names, one per open tab.
    """
    return {self.tabs.widget(index).language.lower()
            for index in range(self.get_stack_count())}
def _get_kdjd(cls, df, n_days):
    """Get the D of KDJ.

    D = 2/3 × (prev. D) + 1/3 × (curr. K)
    2/3 and 1/3 are the smooth parameters.

    :param df: data
    :param n_days: calculation range
    :return: None
    """
    # Column names follow the kdjk_<n> / kdjd_<n> convention.
    k_column = 'kdjk_{}'.format(n_days)
    d_column = 'kdjd_{}'.format(n_days)
    # Smooth the K series into D and store it as a new column in place.
    df[d_column] = list(cls._calc_kd(df.get(k_column)))
def remove_empty_text(utterances: List[Utterance]) -> List[Utterance]:
    """Remove empty utterances from a list of utterances.

    Args:
        utterances: The list of utterances we are processing
    """
    # Keep only utterances whose text has some non-whitespace content.
    return list(filter(lambda utter: utter.text.strip() != "", utterances))
def get_element_by_attr_key(self, attr_key, el_idx=0):
    """Args:
        attr_key : str
            Name of attribute for which to search
        el_idx : int
            Index of element to use as base in the event that there are
            multiple sibling elements with the same name.

    Returns:
        Element containing an attribute key named ``attr_key``.
    """
    matches = self.get_element_list_by_attr_key(attr_key)
    try:
        return matches[el_idx]
    except IndexError:
        # Requested index is outside the available siblings.
        raise SimpleXMLWrapperException(
            'Element with tag not found. tag_name="{}" requested_idx={} '
            'available_elements={}'.format(attr_key, el_idx, len(matches))
        )
def generateExecutable(self, outpath='.', signed=False):
    """Generates the executable for this builder in the output path.

    Builds (or reuses) a PyInstaller-style spec file from the builder's
    options, runs the configured build command against it, and optionally
    signs the resulting binary.

    :param outpath | <str>
    :param signed | <bool> -- when True, sign the produced .exe
    :return <bool> -- True when the build command succeeded (or there was
            nothing to build)
    """
    # Nothing to do unless a runtime or an explicit specfile is configured.
    if not (self.runtime() or self.specfile()):
        return True
    if not self.distributionPath():
        return True
    # Start from a clean distribution directory.
    if os.path.exists(self.distributionPath()):
        shutil.rmtree(self.distributionPath())
    if os.path.isfile(self.sourcePath()):
        basepath = os.path.normpath(os.path.dirname(self.sourcePath()))
    else:
        basepath = os.path.normpath(self.sourcePath())
    # store the plugin table of contents
    self.generatePlugins(basepath)
    # generate the specfile if necessary
    specfile = self.specfile()
    # generate the spec file options
    opts = {'name': self.name(), 'exname': self.executableName(), 'product': self.productName(), 'runtime': self.runtime(), 'srcpath': self.sourcePath(), 'buildpath': self.buildPath(), 'hookpaths': ',\n'.join(wrap_str(self.hookPaths())), 'hiddenimports': ',\n'.join(wrap_str(self.hiddenImports())), 'distpath': self.distributionPath(), 'platform': sys.platform, 'excludes': ',\n'.join(wrap_str(self.executableExcludes()))}
    if not specfile:
        # Render each data entry into the spec template.
        datasets = []
        for typ, data in self.executableData():
            if typ == 'tree':
                args = {'path': data[0], 'prefix': data[1], 'excludes': ','.join(wrap_str(data[2]))}
                datasets.append(templ.SPECTREE.format(**args))
            else:
                args = {}
                args.update(data)
                args.setdefault('type', typ)
                datasets.append(templ.SPECDATA.format(**args))
        opts['datasets'] = '\n'.join(datasets)
        opts.update(self._executableOptions)
        # CLI builds collect differently from GUI builds.
        if self.executableCliName():
            opts['cliname'] = self.executableCliName()
            opts['collect'] = templ.SPECFILE_CLI.format(**opts)
        else:
            opts['collect'] = templ.SPECFILE_COLLECT.format(**opts)
        # assumes 'onefile' is always present in _executableOptions —
        # TODO confirm, otherwise this raises KeyError.
        if opts['onefile']:
            data = templ.SPECFILE_ONEFILE.format(**opts)
        else:
            data = templ.SPECFILE.format(**opts)
        # generate the spec file for building
        specfile = os.path.join(self.buildPath(), self.name() + '.spec')
        f = open(specfile, 'w')
        f.write(data)
        f.close()
    # Run the external build command against the spec file.
    cmd = os.path.expandvars(self.executableOption('cmd'))
    success = cmdexec(cmd.format(spec=specfile)) == 0
    if signed:
        # Windows-style output name; sign the produced binary in place.
        binfile = os.path.join(opts['distpath'], opts['product'], opts['exname'] + '.exe')
        self.sign(binfile)
    return success
def on_btn_add_fit(self, event):
    """add a new interpretation to the current specimen.

    Parameters
    ----------
    event : the wx.ButtonEvent that triggered this function

    Alters
    ------
    pmag_results_data
    """
    # The auto-save checkbox decides whether the new fit starts out saved.
    if self.auto_save.GetValue():
        self.current_fit = self.add_fit(self.s, None, None, None, saved=True)
    else:
        self.current_fit = self.add_fit(self.s, None, None, None, saved=False)
    self.generate_warning_text()
    self.update_warning_box()
    # Keep the interpretation editor in sync when it is open.
    if self.ie_open:
        self.ie.update_editor()
    self.update_fit_boxes(True)
    # Draw figures and add text
    self.get_new_PCA_parameters(event)
def commonancestors(*nodes):
    """Determine common ancestors of `nodes`.

    >>> from anytree import Node
    >>> udo = Node("Udo")
    >>> marc = Node("Marc", parent=udo)
    >>> lian = Node("Lian", parent=marc)
    >>> dan = Node("Dan", parent=udo)
    >>> jet = Node("Jet", parent=dan)
    >>> jan = Node("Jan", parent=dan)
    >>> joe = Node("Joe", parent=dan)
    >>> commonancestors(jet, joe)
    (Node('/Udo'), Node('/Udo/Dan'))
    >>> commonancestors(jet, marc)
    (Node('/Udo'),)
    >>> commonancestors(jet)
    (Node('/Udo'), Node('/Udo/Dan'))
    >>> commonancestors()
    ()
    """
    lineages = [node.ancestors for node in nodes]
    shared = []
    # Walk the ancestor chains in lockstep, root first; stop at the first
    # level where the nodes' ancestors diverge.
    for level in zip(*lineages):
        candidate = level[0]
        if all(candidate is other for other in level[1:]):
            shared.append(candidate)
        else:
            break
    return tuple(shared)
def resizeEvent(self, event):
    """Overloads the resize event to handle updating of buttons.

    :param event | <QResizeEvent>
    """
    super(XLineEdit, self).resizeEvent(event)
    # Reposition the embedded buttons to fit the new widget geometry.
    self.adjustButtons()
def is_widget_required_attribute(widget):
    """Is this widget required?"""
    # Required, unless the widget type is exempt from the "required" flag.
    return bool(widget.is_required) and not isinstance(widget, WIDGETS_NO_REQUIRED)
def forwards(apps, schema_editor):
    """Change all Movie objects into Work objects, and their associated
    data into WorkRole and WorkSelection models, then delete the Movie.

    Django data-migration: uses the historical model registry, never the
    live model classes.
    """
    Movie = apps.get_model('spectator_events', 'Movie')
    Work = apps.get_model('spectator_events', 'Work')
    WorkRole = apps.get_model('spectator_events', 'WorkRole')
    WorkSelection = apps.get_model('spectator_events', 'WorkSelection')
    for m in Movie.objects.all():
        # Mirror the movie's own fields onto a new Work of kind 'movie'.
        work = Work.objects.create(kind='movie', title=m.title, title_sort=m.title_sort, year=m.year, imdb_id=m.imdb_id)
        # Re-home each credited role onto the new Work.
        for role in m.roles.all():
            WorkRole.objects.create(creator=role.creator, work=work, role_name=role.role_name, role_order=role.role_order)
        # Re-home each event selection onto the new Work.
        for selection in m.events.all():
            WorkSelection.objects.create(event=selection.event, work=work, order=selection.order)
        m.delete()
def contains_merged_cell(self):
    """True if one or more cells in range are part of a merged cell."""
    # A cell participates in a merge when it spans extra columns/rows or
    # carries a horizontal/vertical merge continuation flag.
    return any(
        tc.gridSpan > 1 or tc.rowSpan > 1 or tc.hMerge or tc.vMerge
        for tc in self.iter_tcs()
    )
def removeextensibles(self, key, name):
    """Remove extensible items in the object of key and name.

    Only for internal use.  # TODO : hide this

    Parameters
    ----------
    key : str
        The type of IDF object. This must be in ALL_CAPS.
    name : str
        The name of the object to fetch.

    Returns
    -------
    EpBunch object
    """
    # Thin delegation to the module-level removeextensibles() helper,
    # supplying this IDF's objects, model and IDD info.
    return removeextensibles(self.idfobjects, self.model, self.idd_info, key, name)
def main():
    """Make queries against NCBI Taxa databases.

    Command-line entry point: parses args, optionally updates the local
    taxonomy database, resolves names to taxids, and writes either taxon
    info or descendant taxa to the chosen output.
    """
    # Get commandline args
    args = get_args()
    # Instantiate the ete NCBI taxa object
    ncbi = NCBITaxa(dbfile=args.database)
    ## dbfile location
    if args.verbose > 1:
        sys.stderr.write('Taxa database is stored at {}\n'.format(ncbi.dbfile))
    # Update the database if required.
    if args.update is True:
        if args.verbose > 1:
            msg = 'Updating the taxonomy database. This may take several minutes...\n'
            sys.stderr.write(msg)
        ncbi.update_taxonomy_database()
    # If names were provided in taxid list, convert to taxids
    args.taxid = args.taxid.replace('"', '').replace("'", '').split(',')
    args.taxid = name2taxid(args.taxid, ncbi)
    # Output
    if args.outfile is None:
        outFH = sys.stdout
    else:
        outFH = open(args.outfile, 'w')
    ## header
    if args.taxon_info:
        outFH.write('\t'.join(['name', 'taxid', 'rank', 'lineage']) + '\n')
    elif not args.just_taxids:
        outFH.write('\t'.join(['parent_taxid', 'descendent_taxid', 'descendent_name']) + '\n')
    ## body
    for taxid in args.taxid:
        if args.taxon_info:
            taxon_info(taxid, ncbi, outFH)
        else:
            desc_taxa(taxid, ncbi, outFH, args.just_taxids)
    # NOTE(review): this also closes sys.stdout when no outfile was given —
    # harmless at process exit, but confirm nothing writes afterwards.
    outFH.close()
def get_all_names() -> Tuple[str, ...]:
    """Retrieve a tuple of all known color names, basic and 'known names'.

    The result is deduplicated and sorted alphabetically.
    """
    # Annotation fixed: Tuple[str] means a 1-tuple; this returns a
    # variable-length tuple of strings, i.e. Tuple[str, ...].
    names = set(basic_names)
    names.update(name_data)
    return tuple(sorted(names))
async def delete_pattern(self, pattern, count=None):
    """delete cache according to pattern in redis,
    delete `count` keys each time.

    :param pattern: glob-style pattern passed to redis SCAN
    :param count: hint for how many keys SCAN should return per batch
    :return: total number of keys deleted
    """
    cursor = '0'
    count_deleted = 0
    while cursor != 0:
        cursor, identities = await self.client.scan(cursor=cursor, match=pattern, count=count)
        # SCAN may legitimately return an empty batch; calling DEL with
        # zero keys is an error in redis, so skip those batches.
        if identities:
            count_deleted += await self.client.delete(*identities)
    return count_deleted
def _function_signature ( func ) :
"""Return the signature of a callable as a string .
Parameters
func : callable
Function whose signature to extract .
Returns
sig : string
Signature of the function .""" | if sys . version_info . major > 2 : # Python 3 already implements this functionality
return func . __name__ + str ( inspect . signature ( func ) )
# In Python 2 we have to do it manually , unfortunately
spec = inspect . getargspec ( func )
posargs = spec . args
defaults = spec . defaults if spec . defaults is not None else [ ]
varargs = spec . varargs
kwargs = spec . keywords
deflen = 0 if defaults is None else len ( defaults )
nodeflen = 0 if posargs is None else len ( posargs ) - deflen
args = [ '{}' . format ( arg ) for arg in posargs [ : nodeflen ] ]
args . extend ( '{}={}' . format ( arg , dval ) for arg , dval in zip ( posargs [ nodeflen : ] , defaults ) )
if varargs :
args . append ( '*{}' . format ( varargs ) )
if kwargs :
args . append ( '**{}' . format ( kwargs ) )
argstr = ', ' . join ( args )
return '{}({})' . format ( func . __name__ , argstr ) |
def cvm_stat(data):
    """Calculates the Cramer-von Mises statistic for sorted values from U(0,1)."""
    two_n = 2 * len(data)
    # Expected quantiles (2i - 1) / (2n) for i = 1..n.
    expected = arange(1, two_n, 2) / two_n
    # W^2 = 1/(12n) + sum((expected_i - data_i)^2)
    return 1 / (6 * two_n) + ((expected - data) ** 2).sum()
def _analytical_fit_adjacent(self, ref_dists):
    """Fit coords (x, y, [z]) so that distances from reference coordinates
    match closest to reference distances.

    Solves the linearized least-squares system via the precomputed
    pseudo-inverse ``self._pinvA``.

    :param ref_dists: reference distances to the known coordinates
    :returns: fitted coordinates (result of the pseudo-inverse solve)
    """
    dists = ref_dists ** 2
    # Differences of squared distances between adjacent reference points
    # linearize the problem (the quadratic terms cancel).
    rot_dists = self._rotate_rows(dists)
    b = dists - rot_dists + self._partial_b
    # Kept for inspection/debugging by other methods — TODO confirm use.
    self._b = b
    return self._pinvA.dot(b)
def internal_assert(condition, message=None, item=None, extra=None):
    """Raise InternalException if condition is False.

    If condition is a function, execute it on DEVELOP only.
    """
    # Lazy conditions are only evaluated in development builds.
    if DEVELOP and callable(condition):
        condition = condition()
    if condition:
        return
    raise CoconutInternalException(
        message if message is not None else "assertion failed",
        item if item is not None else condition,
        extra,
    )
def get_transaction_isolation(self):
    """Return the currently active transaction isolation level.

    The level is lazily initialised: the first call fetches the platform
    default and caches it on the instance for subsequent calls.

    :return: the current transaction isolation level
    :rtype: int
    """
    level = self._transaction_isolation_level
    if level is None:
        level = self._platform.get_default_transaction_isolation_level()
        self._transaction_isolation_level = level
    return level
# Factory: builds a PassportData object from a Telegram API response dict.
def from_array ( array ) :
"""Deserialize a new PassportData from a given dictionary .
: return : new PassportData instance .
: rtype : PassportData""" | if array is None or not array :
return None
# end if
assert_type_or_raise ( array , dict , parameter_name = "array" )
data = { }
# 'data' is a list of elements (list_level=1); 'credentials' is a single object.
data [ 'data' ] = EncryptedPassportElement . from_array_list ( array . get ( 'data' ) , list_level = 1 )
data [ 'credentials' ] = EncryptedCredentials . from_array ( array . get ( 'credentials' ) )
# Keep the raw dict around for debugging / round-tripping.
data [ '_raw' ] = array
return PassportData ( ** data ) |
def read_value ( self , varname , path = "/" , cmode = None , default = NO_DEFAULT ) :
"""Returns the values of variable with name varname in the group specified by path .
Args :
varname : Name of the variable
path : path to the group .
cmode : if cmode = = " c " , a complex ndarrays is constructed and returned
( netcdf does not provide native support from complex datatype ) .
default : returns default if varname is not present .
self . Error is raised if default is default is NO _ DEFAULT
Returns :
numpy array if varname represents an array , scalar otherwise .""" | try :
var = self . read_variable ( varname , path = path )
except self . Error :
# Missing variable: re-raise unless the caller supplied a fallback.
if default is NO_DEFAULT :
raise
return default
if cmode is None : # scalar or array
# getValue is not portable !
# Some netcdf backends return a 0-d array, others a 1-element array;
# try the indexed form first and fall back on IndexError.
try :
return var . getValue ( ) [ 0 ] if not var . shape else var [ : ]
except IndexError :
return var . getValue ( ) if not var . shape else var [ : ]
else :
# Complex data is stored as a trailing axis of length 2: (real, imag).
assert var . shape [ - 1 ] == 2
if cmode == "c" :
return var [ ... , 0 ] + 1j * var [ ... , 1 ]
else :
raise ValueError ( "Wrong value for cmode %s" % cmode ) |
def condense ( ss_unescaped ) :
"""Given multiple strings , returns a compressed regular expression just
for these strings
> > > condense ( [ ' she ' , ' he ' , ' her ' , ' hemoglobin ' ] )
' he ( moglobin | r ) ? | she '""" | def estimated_len ( longg , short ) :
# Estimated length of the factored pattern 'short(alts)?' for the group.
return ( 3 + len ( short ) + sum ( map ( len , longg ) ) - len ( longg ) * ( len ( short ) - 1 ) - 1 )
def stupid_len ( longg ) :
# Length of the naive alternation: all strings joined by '|'.
return sum ( map ( len , longg ) ) + len ( longg )
ss = [ re . escape ( s ) for s in set ( ss_unescaped ) ]
ss . sort ( key = len )
# For each short string, collect longer strings having it as prefix ('p')
# or suffix ('s'); these are candidates for factoring.
short2long = defaultdict ( lambda : { 'p' : [ ] , 's' : [ ] } )
for short , longg in combinations ( ss , 2 ) :
if longg . startswith ( short ) :
short2long [ short ] [ 'p' ] . append ( longg )
if longg . endswith ( short ) :
short2long [ short ] [ 's' ] . append ( longg )
short2long = sorted ( list ( short2long . items ( ) ) , key = lambda x : len ( x [ 0 ] ) , reverse = True )
output = [ ]
objs = set ( ss )
for s , pre_sur in short2long :
pp = set ( pre_sur [ 'p' ] ) & objs
# NOTE(review): this rebinds the local name `ss`, shadowing the escaped
# string list built above — apparently intentional, but worth confirming.
ss = set ( pre_sur [ 's' ] ) & objs
# Pick whichever factoring (suffix-shared vs prefix-shared) saves more
# characters relative to the naive alternation.
if ( ( stupid_len ( pp ) - estimated_len ( pp , s ) ) < ( stupid_len ( ss ) - estimated_len ( ss , s ) ) ) :
reg = ( r'({heads})?{surfix}' . format ( surfix = s , heads = '|' . join ( sorted ( [ p [ : - len ( s ) ] for p in ss ] , key = len , reverse = True ) ) ) )
assert len ( reg ) == estimated_len ( ss , s )
output . append ( reg )
objs -= ( ss | set ( [ s ] ) )
elif ( ( stupid_len ( pp ) - estimated_len ( pp , s ) ) > ( stupid_len ( ss ) - estimated_len ( ss , s ) ) ) :
reg = ( r'{prefix}({tails})?' . format ( prefix = s , tails = '|' . join ( sorted ( [ p [ len ( s ) : ] for p in pp ] , key = len , reverse = True ) ) ) )
assert len ( reg ) == estimated_len ( pp , s )
output . append ( reg )
objs -= ( pp | set ( [ s ] ) )
# Anything not absorbed into a factored group is emitted verbatim.
for residual in objs :
output . append ( residual )
# Final cleanup: '(x)?' with a single char collapses to 'x?'.
return re . sub ( r'\(([^)])\)\?' , r'\1?' , r'|' . join ( output ) ) |
async def _remote_close ( self , exc = None ) :
"""close session from remote .""" | if self . state in ( STATE_CLOSING , STATE_CLOSED ) :
# Already closing/closed: nothing to do (idempotent).
return
log . info ( "close session: %s" , self . id )
self . state = STATE_CLOSING
# An exception means the close was caused by an error/interruption.
if exc is not None :
self . exception = exc
self . interrupted = True
try :
# Notify the application handler; its failures must not break closing.
await self . handler ( SockjsMessage ( MSG_CLOSE , exc ) , self )
except Exception :
log . exception ( "Exception in close handler." ) |
def likelihood ( args ) :
"""% prog likelihood
Plot likelihood surface . Look for two files in the current folder :
- 100_100 . log , haploid model
- 100_20 . log , diploid model""" | p = OptionParser ( likelihood . __doc__ )
opts , args , iopts = p . set_image_options ( args , figsize = "10x5" , style = "white" , cmap = "coolwarm" )
if len ( args ) != 0 :
sys . exit ( not p . print_help ( ) )
fig , ( ax1 , ax2 ) = plt . subplots ( ncols = 2 , nrows = 1 , figsize = ( iopts . w , iopts . h ) )
plt . tight_layout ( pad = 4 )
# Haploid model
LL , CI_h1 , CI_h2 , MLE = parse_log ( "100_100.log" )
data = [ ]
# LL keys are parameter tuples; use the first component for the 1-D curve.
for k , v in LL . items ( ) :
data . append ( ( k [ 0 ] , v ) )
data . sort ( )
x , y = zip ( * data )
x = np . array ( x )
curve , = ax1 . plot ( x , y , "-" , color = lsg , lw = 2 )
ax1 . set_title ( "Simulated haploid ($h^{truth}=100$)" )
# Maximum-likelihood estimate and curve extremes for annotation.
h_hat , max_LL = max ( data , key = lambda x : x [ - 1 ] )
_ , min_LL = min ( data , key = lambda x : x [ - 1 ] )
ymin , ymax = ax1 . get_ylim ( )
ax1 . set_ylim ( [ ymin , ymax + 30 ] )
LL_label = "log(Likelihood)"
ax1 . plot ( [ h_hat , h_hat ] , [ ymin , max_LL ] , ":" , color = lsg , lw = 2 )
ax1 . text ( h_hat , max_LL + 10 , r"$\hat{h}=93$" , color = lsg )
ax1 . set_xlabel ( r"$h$" )
ax1 . set_ylabel ( LL_label )
a , b = CI_h1
# Shade the 95% confidence interval under the curve.
ci = ax1 . fill_between ( x , [ ymin ] * len ( x ) , y , where = ( x >= a ) & ( x <= b ) , color = lsg , alpha = .5 )
ax1 . legend ( [ curve , ci ] , [ "Likelihood curve" , r'95$\%$ CI' ] , loc = 'best' )
# Diploid model
LL , CI_h1 , CI_h2 , MLE = parse_log ( "100_20.log" )
# NOTE(review): h_hat/max_LL/min_LL here are recomputed from the haploid
# `data` (it is only rebuilt two lines below) — confirm this is intended.
h_hat , max_LL = max ( data , key = lambda x : x [ - 1 ] )
_ , min_LL = min ( data , key = lambda x : x [ - 1 ] )
data = np . ones ( ( 301 , 301 ) ) * min_LL
for k , v in LL . items ( ) :
a , b = k
# The surface is symmetric in (a, b), so fill both triangles.
data [ a , b ] = v
data [ b , a ] = v
data = mask_upper_triangle ( data )
ax_imshow ( ax2 , data , opts . cmap , LL_label , 20 , 104 )
root = fig . add_axes ( [ 0 , 0 , 1 , 1 ] )
pad = .04
panel_labels ( root , ( ( pad / 2 , 1 - pad , "A" ) , ( 1 / 2. , 1 - pad , "B" ) ) )
normalize_axes ( root )
image_name = "likelihood." + iopts . format
savefig ( image_name , dpi = iopts . dpi , iopts = iopts ) |
def parse ( readDataInstance ) :
"""Returns a new L { ExportTableEntry } object .
@ type readDataInstance : L { ReadData }
@ param readDataInstance : A L { ReadData } object with data to be parsed as a L { ExportTableEntry } object .
@ rtype : L { ExportTableEntry }
@ return : A new L { ExportTableEntry } object .""" | exportEntry = ExportTableEntry ( )
# Field order must match the on-disk layout: dword RVA, word ordinal,
# dword name RVA, then the NUL-terminated name string.
exportEntry . functionRva . value = readDataInstance . readDword ( )
exportEntry . nameOrdinal . value = readDataInstance . readWord ( )
exportEntry . nameRva . value = readDataInstance . readDword ( )
exportEntry . name . value = readDataInstance . readString ( )
return exportEntry |
async def result_processor(tasks):
    """Aggregate awaitable results into a single dict.

    Each task resolves to a ``(num, res)`` pair; the returned mapping is
    keyed by ``num``. This runs inside unsync's loop/thread.
    """
    collected = {}
    for pending in tasks:
        key, value = await pending
        collected[key] = value
    return collected
def pivot ( self , wavelengths = None ) :
"""Calculate : ref : ` pivot wavelength < synphot - formula - pivwv > ` .
Parameters
wavelengths : array - like , ` ~ astropy . units . quantity . Quantity ` , or ` None `
Wavelength values for sampling .
If not a Quantity , assumed to be in Angstrom .
If ` None ` , ` waveset ` is used .
Returns
pivwv : ` ~ astropy . units . quantity . Quantity `
Pivot wavelength .""" | x = self . _validate_wavelengths ( wavelengths ) . value
y = self ( x ) . value
# Pivot wavelength: sqrt( integral(y*x) / integral(y/x) ), trapezoidal rule.
num = np . trapz ( y * x , x = x )
den = np . trapz ( y / x , x = x )
if den == 0 : # pragma : no cover
# Degenerate throughput: avoid division by zero.
pivwv = 0.0
else :
pivwv = np . sqrt ( abs ( num / den ) )
return pivwv * self . _internal_wave_unit |
def usearchdb ( fasta , alignment = 'local' , usearch_loc = 'usearch' ) :
"""make usearch db""" | if '.udb' in fasta :
# Input is already a usearch database; nothing to build.
print ( '# ... database found: %s' % ( fasta ) , file = sys . stderr )
return fasta
# NOTE(review): `type` shadows the builtin of the same name.
type = check_type ( fasta )
db = '%s.%s.udb' % ( fasta . rsplit ( '.' , 1 ) [ 0 ] , type )
if os . path . exists ( db ) is False :
print ( '# ... making usearch db for: %s' % ( fasta ) , file = sys . stderr )
# NOTE(review): os.system interpolates the paths into a shell string —
# unsafe if `fasta`/`usearch_loc` can contain shell metacharacters.
if alignment == 'local' :
os . system ( '%s -makeudb_ublast %s -output %s >> log.txt' % ( usearch_loc , fasta , db ) )
elif alignment == 'global' :
os . system ( '%s -makeudb_usearch %s -output %s >> log.txt' % ( usearch_loc , fasta , db ) )
else :
print ( '# ... database found for: %s' % ( fasta ) , file = sys . stderr )
return db |
def deserialize_dict ( self , attr , dict_type ) :
"""Deserialize a dictionary .
: param dict / list attr : Dictionary to be deserialized . Also accepts
a list of key , value pairs .
: param str dict _ type : The object type of the items in the dictionary .
: rtype : dict""" | if isinstance ( attr , list ) :
# List-of-pairs form: [{'key': k, 'value': v}, ...]
return { x [ 'key' ] : self . deserialize_data ( x [ 'value' ] , dict_type ) for x in attr }
if isinstance ( attr , ET . Element ) : # Transform < Key > value < / Key > into { " Key " : " value " }
attr = { el . tag : el . text for el in attr }
return { k : self . deserialize_data ( v , dict_type ) for k , v in attr . items ( ) } |
def expand_indent(line):
    r"""Return the amount of leading indentation of *line*, with tabs
    expanded to the next multiple of 8 (as a terminal would render them).

    >>> expand_indent('    ')
    4
    >>> expand_indent('\t')
    8
    >>> expand_indent('   \t')
    8
    >>> expand_indent('        \t')
    16
    """
    # Fast path: no tabs means indentation is just the count of leading spaces.
    if '\t' not in line:
        return len(line) - len(line.lstrip())
    width = 0
    for ch in line:
        if ch == '\t':
            # Advance to the next tab stop (multiple of 8).
            width = width - (width % 8) + 8
        elif ch == ' ':
            width += 1
        else:
            break
    return width
def parse_flash_log ( self , logf ) :
"""parse flash logs""" | data = OrderedDict ( )
# One FLASH log file may contain several per-sample sections.
samplelogs = self . split_log ( logf [ 'f' ] )
for slog in samplelogs :
try :
sample = dict ( )
# # Sample name # #
s_name = self . clean_pe_name ( slog , logf [ 'root' ] )
if s_name is None :
# Section without a recognisable sample name: skip it.
continue
sample [ 's_name' ] = s_name
# # Log attributes # #
# fl=True parses the field as a float (percentages); others are ints.
sample [ 'totalpairs' ] = self . get_field ( 'Total pairs' , slog )
sample [ 'discardpairs' ] = self . get_field ( 'Discarded pairs' , slog )
sample [ 'percdiscard' ] = self . get_field ( 'Percent Discarded' , slog , fl = True )
sample [ 'combopairs' ] = self . get_field ( 'Combined pairs' , slog )
sample [ 'inniepairs' ] = self . get_field ( 'Innie pairs' , slog )
sample [ 'outiepairs' ] = self . get_field ( 'Outie pairs' , slog )
sample [ 'uncombopairs' ] = self . get_field ( 'Uncombined pairs' , slog )
sample [ 'perccombo' ] = self . get_field ( 'Percent combined' , slog , fl = True )
data [ s_name ] = sample
except Exception as err :
# A malformed section should not abort parsing of the whole file.
log . warning ( "Error parsing record in {}. {}" . format ( logf [ 'fn' ] , err ) )
log . debug ( traceback . format_exc ( ) )
continue
return data |
def get_indexer_for(self, target, **kwargs):
    """Guaranteed return of an indexer even when non-unique.

    Dispatches to ``get_indexer`` for unique indexes and to
    ``get_indexer_non_unique`` otherwise (discarding the missing-mask
    that the non-unique variant also returns).
    """
    if not self.is_unique:
        indexer, _ = self.get_indexer_non_unique(target, **kwargs)
        return indexer
    return self.get_indexer(target, **kwargs)
def add_cyclic_datepart ( df : DataFrame , field_name : str , prefix : str = None , drop : bool = True , time : bool = False , add_linear : bool = False ) :
"Helper function that adds trigonometric date / time features to a date in the column ` field _ name ` of ` df ` ." | make_date ( df , field_name )
field = df [ field_name ]
# Column prefix defaults to the field name with a trailing 'date'/'Date' stripped.
prefix = ifnone ( prefix , re . sub ( '[Dd]ate$' , '' , field_name ) )
# Per-row feature tuples; cyclic_dt_features does the sin/cos expansion.
series = field . apply ( partial ( cyclic_dt_features , time = time , add_linear = add_linear ) )
columns = [ prefix + c for c in cyclic_dt_feat_names ( time , add_linear ) ]
df_feats = pd . DataFrame ( [ item for item in series ] , columns = columns , index = series . index )
# NOTE: concat returns a new frame; the caller must use the return value.
df = pd . concat ( [ df , df_feats ] , axis = 1 )
if drop :
df . drop ( field_name , axis = 1 , inplace = True )
return df |
def account_history ( self , account , count ) :
"""Reports send / receive information for a * * account * *
: param account : Account to get send / receive information for
: type account : str
: param count : number of blocks to return
: type count : int
: raises : : py : exc : ` nano . rpc . RPCException `
> > > rpc . account _ history (
. . . account = " xrb _ 3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000 " ,
. . . count = 1
" hash " : " 000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F " ,
" type " : " receive " ,
" account " : " xrb _ 3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000 " ,
" amount " : 100000""" | account = self . _process_value ( account , 'account' )
count = self . _process_value ( count , 'int' )
payload = { "account" : account , "count" : count }
resp = self . call ( 'account_history' , payload )
# Missing/empty 'history' becomes an empty list.
history = resp . get ( 'history' ) or [ ]
# The RPC returns amounts as strings; normalise to int in place.
for entry in history :
entry [ 'amount' ] = int ( entry [ 'amount' ] )
return history |
def _inferSchema ( self , rdd , samplingRatio = None , names = None ) :
"""Infer schema from an RDD of Row or tuple .
: param rdd : an RDD of Row or tuple
: param samplingRatio : sampling ratio , or no sampling ( default )
: param names : optional list of column names
: return : : class : ` pyspark . sql . types . StructType `""" | first = rdd . first ( )
if not first :
raise ValueError ( "The first row in RDD is empty, " "can not infer schema" )
if type ( first ) is dict :
warnings . warn ( "Using RDD of dict to inferSchema is deprecated. " "Use pyspark.sql.Row instead" )
if samplingRatio is None :
# No sampling: infer from the first row, then merge in up to 99 more
# rows only while some field type is still unresolved (NullType).
schema = _infer_schema ( first , names = names )
if _has_nulltype ( schema ) :
for row in rdd . take ( 100 ) [ 1 : ] :
schema = _merge_type ( schema , _infer_schema ( row , names = names ) )
if not _has_nulltype ( schema ) :
break
else :
raise ValueError ( "Some of types cannot be determined by the " "first 100 rows, please try again with sampling" )
else :
# Sampling path: merge inferred schemas across a sample of the RDD.
if samplingRatio < 0.99 :
rdd = rdd . sample ( False , float ( samplingRatio ) )
schema = rdd . map ( lambda row : _infer_schema ( row , names ) ) . reduce ( _merge_type )
return schema |
def find_shape(bottom_lines, max_len):
    """Finds a shape of lowest horizontal lines with step = 1.

    :param bottom_lines: sequence of lines, each indexable as
        (start_col, row, end_col); columns are 1-based, end exclusive
    :param max_len: number of columns in the resulting shape
    :return: list of levels (row values); list indexes are columns, and
        columns not covered by any line default to level 1
    """
    shape = []
    for col in range(1, max_len + 1):
        # First matching line wins, mirroring the input ordering.
        level = next((ln[1] for ln in bottom_lines if ln[0] <= col < ln[2]), 1)
        shape.append(level)
    return shape
def wolmatch ( tgt , tgt_type = 'glob' , bcast = '255.255.255.255' , destport = 9 ) :
'''Send a " Magic Packet " to wake up Minions that are matched in the grains cache
CLI Example :
. . code - block : : bash
salt - run network . wolmatch minion _ id
salt - run network . wolmatch 192.168.0.0/16 tgt _ type = ' ipcidr ' bcast = 255.255.255.255 destport = 7''' | ret = [ ]
# Resolve the target expression against cached grains to get minion data.
minions = __salt__ [ 'cache.grains' ] ( tgt , tgt_type )
for minion in minions :
for iface , mac in minion [ 'hwaddr_interfaces' ] . items ( ) :
# Skip the loopback interface: it has no wakeable hardware address.
if iface == 'lo' :
continue
mac = mac . strip ( )
wol ( mac , bcast , destport )
log . info ( 'Waking up %s' , mac )
ret . append ( mac )
return ret |
def add_component ( self , kind , ** kwargs ) :
"""Add a new component ( star or orbit ) to the system . If not provided ,
' component ' ( the name of the new star or orbit ) will be created for
you and can be accessed by the ' component ' attribute of the returned
ParameterSet .
> > > b . add _ component ( component . star )
or
> > > b . add _ component ( ' orbit ' , period = 2.5)
Available kinds include :
* : func : ` phoebe . parameters . component . star `
* : func : ` phoebe . parameters . component . orbit `
: parameter kind : function to call that returns a
ParameterSet or list of parameters . This must either be
a callable function that accepts nothing but default
values , or the name of a function ( as a string ) that can
be found in the : mod : ` phoebe . parameters . component ` module
( ie . ' star ' , ' orbit ' )
: type kind : str or callable
: parameter str component : ( optional ) name of the newly - created
component
: parameter * * kwargs : default values for any of the newly - created
parameters
: return : : class : ` phoebe . parameters . parameters . ParameterSet ` of
all parameters that have been added
: raises NotImplementedError : if required constraint is not implemented""" | func = _get_add_func ( component , kind )
# ^ `component` here is presumably the module imported at file level — verify.
if kwargs . get ( 'component' , False ) is None : # then we want to apply the default below , so let ' s pop for now
_ = kwargs . pop ( 'component' )
# NOTE(review): func.func_name is the Python-2 spelling (py3: __name__).
kwargs . setdefault ( 'component' , self . _default_label ( func . func_name , ** { 'context' : 'component' , 'kind' : func . func_name } ) )
if kwargs . pop ( 'check_label' , True ) :
self . _check_label ( kwargs [ 'component' ] )
params , constraints = func ( ** kwargs )
metawargs = { 'context' : 'component' , 'component' : kwargs [ 'component' ] , 'kind' : func . func_name }
self . _attach_params ( params , ** metawargs )
# Record enough information to redo/undo this operation from history.
redo_kwargs = deepcopy ( kwargs )
redo_kwargs [ 'func' ] = func . func_name
self . _add_history ( redo_func = 'add_component' , redo_kwargs = redo_kwargs , undo_func = 'remove_component' , undo_kwargs = { 'component' : kwargs [ 'component' ] } )
for constraint in constraints :
self . add_constraint ( * constraint )
# since we ' ve already processed ( so that we can get the new qualifiers ) ,
# we ' ll only raise a warning
self . _kwargs_checks ( kwargs , warning_only = True )
# return params
return self . get_component ( ** metawargs ) |
def dispense ( self ) :
'''dispense a card if ready , otherwise throw an Exception''' | self . sendcommand ( Vendapin . DISPENSE )
# wait for the reply
time . sleep ( 1 )
# parse the reply
response = self . receivepacket ( )
print ( 'Vendapin.dispense(): ' + str ( response ) )
if not self . was_packet_accepted ( response ) :
raise Exception ( 'DISPENSE packet not accepted: ' + str ( response ) )
# Return only the first data byte of the parsed payload.
return self . parsedata ( response ) [ 0 ] |
def references(self):
    """Return (tail, head) pairs for every edge in the graph."""
    edges = []
    for tail in self.vertices:
        edges.extend((tail, head) for head in self.children(tail))
    return edges
def get_process_fingerprint ( ) :
"""Extract a unique fingerprint for the current process , using a
combination of the process PID and the system ' s hostname .""" | pid = os . getpid ( )
hostname = socket . gethostname ( )
# Encode the PID in base-36, left-padded to 2 characters.
padded_pid = _pad ( _to_base36 ( pid ) , 2 )
# Cheap, order-insensitive hash of the hostname (+36 keeps it >= 1 base-36 digit).
hostname_hash = sum ( [ ord ( x ) for x in hostname ] ) + len ( hostname ) + 36
padded_hostname = _pad ( _to_base36 ( hostname_hash ) , 2 )
# Fingerprint is 4 base-36 chars: 2 for the PID, 2 for the hostname hash.
return padded_pid + padded_hostname |
def from_file ( cls , filename , source ) :
"""Load a theme from the specified configuration file .
Parameters :
filename : The name of the filename to load .
source : A description of where the theme was loaded from .""" | _logger . info ( 'Loading theme %s' , filename )
try :
config = configparser . ConfigParser ( )
config . optionxform = six . text_type
# Preserve case
with codecs . open ( filename , encoding = 'utf-8' ) as fp :
# NOTE(review): readfp is the deprecated py2-era API (py3: read_file).
config . readfp ( fp )
except configparser . ParsingError as e :
# NOTE(review): e.message is a Python-2 attribute — confirm on py3.
raise ConfigError ( e . message )
if not config . has_section ( 'theme' ) :
raise ConfigError ( 'Error loading {0}:\n' ' missing [theme] section' . format ( filename ) )
# The theme name is the file's basename without its extension.
theme_name = os . path . basename ( filename )
theme_name , _ = os . path . splitext ( theme_name )
elements = { }
for element , line in config . items ( 'theme' ) :
if element not in cls . DEFAULT_ELEMENTS : # Could happen if using a new config with an older version
# of the software
_logger . info ( 'Skipping element %s' , element )
continue
elements [ element ] = cls . _parse_line ( element , line , filename )
return cls ( name = theme_name , source = source , elements = elements ) |
def card_names_and_ids ( self ) :
"""Returns [ ( name , id ) , . . . ] pairs of cards from current board""" | b = Board ( self . client , self . board_id )
cards = b . getCards ( )
# unidecode folds card names to plain ASCII for stable comparison/display.
card_names_and_ids = [ ( unidecode ( c . name ) , c . id ) for c in cards ]
return card_names_and_ids |
def insertBPoint ( self , index , type = None , anchor = None , bcpIn = None , bcpOut = None , bPoint = None ) :
"""Insert a bPoint at index in the contour .""" | if bPoint is not None :
# A template bPoint supplies defaults for any argument left as None.
if type is None :
type = bPoint . type
if anchor is None :
anchor = bPoint . anchor
if bcpIn is None :
bcpIn = bPoint . bcpIn
if bcpOut is None :
bcpOut = bPoint . bcpOut
# Normalize/validate all arguments before delegating to the implementation.
index = normalizers . normalizeIndex ( index )
type = normalizers . normalizeBPointType ( type )
# NOTE(review): if both bPoint and anchor are None, None reaches
# normalizeCoordinateTuple here — confirm the normalizer handles it.
anchor = normalizers . normalizeCoordinateTuple ( anchor )
if bcpIn is None :
bcpIn = ( 0 , 0 )
bcpIn = normalizers . normalizeCoordinateTuple ( bcpIn )
if bcpOut is None :
bcpOut = ( 0 , 0 )
bcpOut = normalizers . normalizeCoordinateTuple ( bcpOut )
self . _insertBPoint ( index = index , type = type , anchor = anchor , bcpIn = bcpIn , bcpOut = bcpOut ) |
def to_directory ( self , directory , force = False ) :
"""Write this security object to a directory .
Parameters
directory : str
The directory to write the configuration to .
force : bool , optional
If security credentials already exist at this location , an error
will be raised by default . Set to True to overwrite existing files .
Returns
security : Security
A new security object backed by the written files .""" | self . _validate ( )
# Create directory if it doesn ' t exist
makedirs ( directory , exist_ok = True )
cert_path = os . path . join ( directory , 'skein.crt' )
key_path = os . path . join ( directory , 'skein.pem' )
cert_bytes = self . _get_bytes ( 'cert' )
key_bytes = self . _get_bytes ( 'key' )
# Serialise writers under a lock file so concurrent callers don't race.
lock_path = os . path . join ( directory , 'skein.lock' )
with lock_file ( lock_path ) :
for path , name in [ ( cert_path , 'skein.crt' ) , ( key_path , 'skein.pem' ) ] :
if os . path . exists ( path ) :
if force :
os . unlink ( path )
else :
msg = ( "%r file already exists, use `%s` to overwrite" % ( name , '--force' if context . is_cli else 'force' ) )
raise context . FileExistsError ( msg )
# O_EXCL + mode 0o600: fail if the file reappears, keep keys private.
flags = os . O_WRONLY | os . O_CREAT | os . O_EXCL
for path , data in [ ( cert_path , cert_bytes ) , ( key_path , key_bytes ) ] :
with os . fdopen ( os . open ( path , flags , 0o600 ) , 'wb' ) as fil :
fil . write ( data )
return Security ( cert_file = cert_path , key_file = key_path ) |
def subnet_group_present ( name , subnet_ids = None , subnet_names = None , description = None , tags = None , region = None , key = None , keyid = None , profile = None ) :
'''Ensure ElastiCache subnet group exists .
. . versionadded : : 2015.8.0
name
The name for the ElastiCache subnet group . This value is stored as a lowercase string .
subnet _ ids
A list of VPC subnet IDs for the cache subnet group . Exclusive with subnet _ names .
subnet _ names
A list of VPC subnet names for the cache subnet group . Exclusive with subnet _ ids .
description
Subnet group description .
tags
A list of tags .
region
Region to connect to .
key
Secret key to be used .
keyid
Access key to be used .
profile
A dict with region , key and keyid , or a pillar key ( string ) that
contains a dict with region , key and keyid .''' | ret = { 'name' : name , 'result' : True , 'comment' : '' , 'changes' : { } }
exists = __salt__ [ 'boto_elasticache.subnet_group_exists' ] ( name = name , tags = tags , region = region , key = key , keyid = keyid , profile = profile )
if not exists :
# Test mode: report the pending change without applying it.
if __opts__ [ 'test' ] :
ret [ 'comment' ] = 'Subnet group {0} is set to be created.' . format ( name )
ret [ 'result' ] = None
return ret
created = __salt__ [ 'boto_elasticache.create_subnet_group' ] ( name = name , subnet_ids = subnet_ids , subnet_names = subnet_names , description = description , tags = tags , region = region , key = key , keyid = keyid , profile = profile )
if not created :
ret [ 'result' ] = False
ret [ 'comment' ] = 'Failed to create {0} subnet group.' . format ( name )
return ret
ret [ 'changes' ] [ 'old' ] = None
ret [ 'changes' ] [ 'new' ] = name
ret [ 'comment' ] = 'Subnet group {0} created.' . format ( name )
return ret
# Group already exists: nothing to change.
ret [ 'comment' ] = 'Subnet group present.'
return ret |
def _name_attribute_to_string ( self , name ) :
"""Build a / - separated string from an x509 . Name .""" | return "" . join ( "/{}={}" . format ( self . _get_oid_name ( attr . oid ) , attr . value , ) for attr in name ) |
def MI_associatorNames ( self , env , objectName , assocClassName , resultClassName , role , resultRole ) : # pylint : disable = invalid - name
"""Return instances names associated to a given object .
Implements the WBEM operation AssociatorNames in terms
of the references method . A derived class will not normally
override this method .""" | logger = env . get_logger ( )
logger . log_debug ( 'CIMProvider2 MI_associatorNames called. ' 'assocClass: %s' % ( assocClassName ) )
if not assocClassName :
raise pywbem . CIMError ( pywbem . CIM_ERR_FAILED , "Empty assocClassName passed to AssociatorNames" )
model = pywbem . CIMInstance ( classname = assocClassName )
model . path = pywbem . CIMInstanceName ( classname = assocClassName , namespace = objectName . namespace )
# result_role/result_class filtering is deliberately done below, not in
# references(): result_role=None and keys_only=False here.
gen = self . references ( env = env , object_name = objectName , model = model , result_class_name = resultClassName , role = role , result_role = None , keys_only = False )
if gen is None :
logger . log_debug ( 'references() returned None instead of ' 'generator object' )
return
for inst in gen :
for prop in inst . properties . values ( ) :
lpname = prop . name . lower ( )
# Only reference-typed properties can name an associated instance.
if prop . type != 'reference' :
continue
# Skip the role pointing back at the source object, keep only
# properties matching the requested result role/class.
if role and role . lower ( ) == lpname :
continue
if resultRole and resultRole . lower ( ) != lpname :
continue
if self . paths_equal ( prop . value , objectName ) :
continue
if resultClassName and resultClassName . lower ( ) != prop . value . classname . lower ( ) :
continue
# Fill in a missing namespace from the source object.
if prop . value . namespace is None :
prop . value . namespace = objectName . namespace
yield prop . value
logger . log_debug ( 'CIMProvider2 MI_associatorNames returning' ) |
def schemes_similar(scheme1, scheme2):
    '''Return whether URL schemes are similar.

    Identical schemes are always similar, and HTTP and HTTPS are treated
    as interchangeable.
    '''
    if scheme1 == scheme2:
        return True
    web_schemes = ('http', 'https')
    return scheme1 in web_schemes and scheme2 in web_schemes
def scrape ( text , ptype = None ) :
'''Scrape types from a blob of text and return node tuples .
Args :
text ( str ) : Text to scrape .
ptype ( str ) : Optional ptype to scrape . If present , only scrape rules which match the provided type .
Returns :
( str , str ) : Yield tuples of type , valu strings .''' | for ruletype , rule , info in scrape_types :
# Optional type filter: skip rules that don't match the requested ptype.
if ptype and ptype != ruletype :
continue
# `regexes` maps rule type -> precompiled pattern (module-level cache).
regx = regexes . get ( ruletype )
for valu in regx . findall ( text ) :
yield ( ruletype , valu ) |
from typing import List
def parse_music(music_string: str) -> List[int]:
    """Parse a whitespace-separated string of ASCII notes into beat counts.

    Mapping: 'o' -> 4 (whole note), 'o|' -> 2 (half note), '.|' -> 1
    (quarter note). An unknown token raises KeyError.

    >>> parse_music('o o| .| o| o| .| .| .| .| o o')
    [4, 2, 1, 2, 2, 1, 1, 1, 1, 4, 4]
    """
    note_durations = {'o': 4, 'o|': 2, '.|': 1}
    beats = []
    for token in music_string.split():
        beats.append(note_durations[token])
    return beats
def _get_entity_pair ( self , entity_id ) :
"""Returns ` ` ( entity , input _ entity ) ` ` for the given entity ID .
Either element may be None when it cannot be resolved .""" | entity = self . _entities . get ( entity_id )
try :
input_entity = utils . get_input_peer ( entity )
except TypeError :
# Full entity unusable as an input peer; fall back to the client cache.
try :
input_entity = self . _client . _entity_cache [ entity_id ]
except KeyError :
input_entity = None
return entity , input_entity |
def _close_socket ( self ) :
"""Shutdown and close the Socket .
: return :""" | try :
self . socket . shutdown ( socket . SHUT_RDWR )
except ( OSError , socket . error ) :
pass
self . socket . close ( ) |
def download_source_gafs(group_metadata, target_dir, exclusions=None, base_download_url=None):
    """Download every GAF source in *group_metadata* that is not excluded.

    For each downloaded file the path is recorded; gzip-compressed downloads
    are unzipped in place (the recorded path points at the unzipped file),
    while uncompressed downloads are additionally zipped up so the original
    source is preserved.

    :param group_metadata: group dict with an "id" and a "datasets" list
    :param target_dir: directory to download into
    :param exclusions: dataset names to skip (default: none)
    :param base_download_url: optional base URL override for downloads
    :return: list of (dataset_metadata, downloaded_path) tuples
    """
    # Fix: the default used to be a shared mutable list ([]); use the
    # None-sentinel idiom instead (behaviorally identical for callers).
    if exclusions is None:
        exclusions = []
    gaf_urls = [(data, data["source"]) for data in group_metadata["datasets"]
                if data["type"] == "gaf" and data["dataset"] not in exclusions]
    # List of dataset metadata to gaf download url
    click.echo("Found {}".format(", ".join([kv[0]["dataset"] for kv in gaf_urls])))
    downloaded_paths = []
    for dataset_metadata, gaf_url in gaf_urls:
        # Local target download path setup - path and then directories
        path = download_a_dataset_source(group_metadata["id"], dataset_metadata, target_dir, gaf_url, base_download_url=base_download_url)
        if dataset_metadata["compression"] == "gzip":
            # Unzip any downloaded gzip file, stripping off the .gz extension.
            unzipped = os.path.splitext(path)[0]
            unzip(path, unzipped)
            path = unzipped
        else:
            # The file arrived uncompressed; keep a zipped copy of the
            # original source as well.
            zipup(path)
        downloaded_paths.append((dataset_metadata, path))
    return downloaded_paths
def format ( self , options = None ) :
"""Format this diagnostic for display . The options argument takes
Diagnostic . Display * flags , which can be combined using bitwise OR . If
the options argument is not provided , the default display options will
be used .""" | if options is None :
options = conf . lib . clang_defaultDiagnosticDisplayOptions ( )
# Reject any bits outside the mask libclang understands.
if options & ~ Diagnostic . _FormatOptionsMask :
raise ValueError ( 'Invalid format options' )
return conf . lib . clang_formatDiagnostic ( self , options ) |
def append ( self , name , value ) :
"""Appends the string ` ` value ` ` to the value at ` ` key ` ` . If ` ` key ` `
doesn ' t already exist , create it with a value of ` ` value ` ` .
Returns the new length of the value at ` ` key ` ` .
: param name : str the name of the redis key
: param value : str
: return : Future ( )""" | with self . pipe as pipe :
# redis_key namespaces the key; valueparse.encode serialises the value.
return pipe . append ( self . redis_key ( name ) , self . valueparse . encode ( value ) ) |
# Clone an X.509 chain with freshly generated keys: each certificate keeps the
# original subject/issuer/validity but is re-signed, making the clones useful
# for interception/testing setups (never trusted by real clients).
def certclone ( chain , copy_extensions = False ) :
# Convert pyOpenSSL objects to `cryptography` objects in place.
for i in range ( len ( chain ) ) :
chain [ i ] = chain [ i ] . to_cryptography ( )
newchain = [ ]
'''key = rsa . generate _ private _ key (
public _ exponent = 65537,
key _ size = 2048,
backend = default _ backend ( )
pubkey = key . public _ key ( )''' | first = True
# Walk leaf-last: the reversed iteration rebuilds root first, leaf last.
for original in chain [ : : - 1 ] : # print ( cert )
key = rsa . generate_private_key ( public_exponent = 65537 , key_size = 2048 , backend = default_backend ( ) )
key_pem = key . private_bytes ( encoding = serialization . Encoding . PEM , format = serialization . PrivateFormat . TraditionalOpenSSL , encryption_algorithm = serialization . NoEncryption ( ) ) . decode ( )
# Only the first generated key (the root's) is printed.
if first :
print ( key_pem )
first = False
pubkey = key . public_key ( )
# Todo : Code to mimic the private key type of original cert
# maybe based on pubkey . _ _ class _ _
cert = x509 . CertificateBuilder ( )
cert = cert . subject_name ( original . subject )
cert = cert . issuer_name ( original . issuer )
# cert = cert . serial _ number ( original . serial _ number )
cert = cert . serial_number ( x509 . random_serial_number ( ) )
cert = cert . not_valid_before ( original . not_valid_before )
cert = cert . not_valid_after ( original . not_valid_after )
cert = cert . public_key ( pubkey )
if copy_extensions :
# NOTE(review): copying extensions verbatim carries over key
# identifiers from the original keys — confirm acceptable.
for ext in original . extensions :
cert = cert . add_extension ( ext . value , critical = ext . critical )
# Self-sign with the new key, reusing the original's hash algorithm.
cert = cert . sign ( private_key = key , algorithm = original . signature_hash_algorithm , backend = default_backend ( ) )
cert_pem = cert . public_bytes ( serialization . Encoding . PEM ) . decode ( )
print ( cert_pem )
newchain . insert ( 0 , cert ) |
def values(self):
    """Return the hyperparameter values as a Python dictionary.

    Returns:
        A dictionary with hyperparameter names as keys and the current
        hyperparameter values (read from ``self``) as values, one entry
        per name registered in ``self._hparam_types``.
    """
    # Iterating the dict directly yields its keys; .keys() was redundant.
    return {name: getattr(self, name) for name in self._hparam_types}
def exec_python(attr, src, executable="python"):
    """Run a python subprocess to calculate a package attribute.

    Args:
        attr (str): Name of package attribute being created.
        src (list of str): Python code to execute; joined into a single
            semicolon-delimited line.
        executable (str): Python interpreter to invoke.

    Returns:
        str: Output of the python process.

    Raises:
        InvalidPackageError: if the subprocess exits with nonzero status.
    """
    import subprocess

    if isinstance(src, basestring):
        src = [src]
    proc = popen(
        [executable, "-c", "; ".join(src)],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, err = proc.communicate()
    if proc.returncode:
        # Imported lazily to avoid a cycle at module import time.
        from rez.exceptions import InvalidPackageError

        raise InvalidPackageError(
            "Error determining package attribute '%s':\n%s" % (attr, err)
        )
    return out.strip()
def Refresh(self):
    """Reload the group object to synchronize with the cloud representation.

    >>> clc.v2.Group("wa-1234").Refresh()
    """
    self.dirty = False
    self.data = clc.v2.API.Call(
        'GET', 'groups/%s/%s' % (self.alias, self.id), session=self.session
    )
    # Convert the Zulu timestamps to epoch seconds for local consumption.
    change_info = self.data['changeInfo']
    for field in ('createdDate', 'modifiedDate'):
        change_info[field] = clc.v2.time_utils.ZuluTSToSeconds(change_info[field])
def mutating_method(func):
    """Decorator for methods that are allowed to modify immutable objects.

    The wrapped method runs with ``self._mutable`` temporarily forced to
    True; the previous value is always restored afterwards, even if the
    method raises.
    """
    from functools import wraps

    @wraps(func)  # fix: preserve the wrapped method's name/docstring
    def wrapper(self, *__args, **__kwargs):
        old_mutable = self._mutable
        self._mutable = True
        try:
            # Call the wrapped function with mutation enabled.
            return func(self, *__args, **__kwargs)
        finally:
            # Restore the previous mutability flag unconditionally.
            self._mutable = old_mutable

    return wrapper
def scv_link(scv_sig, rcv_trip):
    '''Create links between SCVs based on their pathogenicity calls.

    # GENO:0000840 - GENO:0000840 --> is_equilavent_to SEPIO:0000098
    # GENO:0000841 - GENO:0000841 --> is_equilavent_to SEPIO:0000098
    # GENO:0000843 - GENO:0000843 --> is_equilavent_to SEPIO:0000098
    # GENO:0000844 - GENO:0000844 --> is_equilavent_to SEPIO:0000098
    # GENO:0000840 - GENO:0000844 --> contradicts SEPIO:0000101
    # GENO:0000841 - GENO:0000844 --> contradicts SEPIO:0000101
    # GENO:0000841 - GENO:0000843 --> contradicts SEPIO:0000101
    # GENO:0000840 - GENO:0000841 --> is_consistent_with SEPIO:0000099
    # GENO:0000843 - GENO:0000844 --> is_consistent_with SEPIO:0000099
    # GENO:0000840 - GENO:0000843 --> strongly_contradicts SEPIO:0000100

    Note: consumes *scv_sig* (entries are popped as pairs are processed)
    and appends the generated triples to *rcv_trip* in place.
    '''
    # Arbitrary scoring scheme: powers of two, so every pairwise score
    # difference is unique and can index the relation table below.
    score = {
        'GENO:0000840': 1,   # pathogenic
        'GENO:0000841': 2,   # likely pathogenic
        'GENO:0000844': 4,   # likely benign
        'GENO:0000843': 8,   # benign
        'GENO:0000845': 16,  # uncertain significance
    }
    # Specific result from the difference in the scoring scheme.
    relation = {
        0: 'SEPIO:0000098',   # is_equilavent_to
        1: 'SEPIO:0000099',   # is_consistent_with
        2: 'SEPIO:0000101',   # contradicts
        3: 'SEPIO:0000101',   # contradicts
        4: 'SEPIO:0000099',   # is_consistent_with
        6: 'SEPIO:0000101',   # contradicts
        7: 'SEPIO:0000100',   # strongly_contradicts
        8: 'SEPIO:0000126',   # is_inconsistent_with
        12: 'SEPIO:0000126',
        14: 'SEPIO:0000126',
        15: 'SEPIO:0000126',
    }
    # Emit a symmetric pair of triples for every unordered pair of SCVs.
    for scv_a in sorted(scv_sig.keys()):
        sig_a = scv_sig.pop(scv_a)
        for scv_b in scv_sig:
            link = relation[abs(score[sig_a] - score[scv_sig[scv_b]])]
            rcv_trip.append(make_spo(scv_a, link, scv_b))
            rcv_trip.append(make_spo(scv_b, link, scv_a))
    return
def _decode_doubles ( message ) :
"""Helper for decode _ qp , decodes a double array .
The double array is stored as little endian 64 bit doubles .
The array has then been base64 encoded . Since we are decoding we do these
steps in reverse .
Args :
message : the double array
Returns :
decoded double array""" | binary = base64 . b64decode ( message )
return struct . unpack ( '<' + ( 'd' * ( len ( binary ) // 8 ) ) , binary ) |
def get_users_for_assigned_to():
    """Return a queryset of users who can be assigned to workflow states.

    Eligible users are the active staff members of the configured user
    model.
    """
    user_model = get_user_model()
    return user_model.objects.filter(is_active=True, is_staff=True)
def get_install_requires_odoo_addons(
    addons_dir,
    depends_override=None,
    external_dependencies_override=None,
    odoo_version_override=None,
):
    """Get the list of requirements for a directory containing addons.

    :param addons_dir: path to a directory whose subdirectories are addons
    :param depends_override: optional mapping overriding addon dependencies
    :param external_dependencies_override: optional mapping overriding
        external (python/binary) dependencies
    :param odoo_version_override: optional Odoo version to assume
    :return: sorted list of requirement strings
    """
    # Fix: avoid mutable default arguments ({} shared across calls).
    if depends_override is None:
        depends_override = {}
    if external_dependencies_override is None:
        external_dependencies_override = {}
    addons = os.listdir(addons_dir)
    # Only installable addons contribute requirements.
    addon_dirs = [
        os.path.join(addons_dir, addon)
        for addon in addons
        if is_installable_addon(os.path.join(addons_dir, addon))
    ]
    install_requires = set()
    for addon_dir in addon_dirs:
        # Sibling addons in the same directory are excluded from depends.
        r = get_install_requires_odoo_addon(
            addon_dir,
            no_depends=addons,
            depends_override=depends_override,
            external_dependencies_override=external_dependencies_override,
            odoo_version_override=odoo_version_override,
        )
        install_requires.update(r)
    return sorted(install_requires)
def save_config(
    self, cmd="save configuration primary", confirm=False, confirm_response=""
):
    """Save the device configuration by delegating to the base class."""
    return super(ExtremeExosBase, self).save_config(
        cmd=cmd, confirm=confirm, confirm_response=confirm_response
    )
def parse_glyphs_filter(filter_str, is_pre=False):
    """Parse a Glyphs custom filter string into a dict ufo2ft can consume.

    Reference:
        ufo2ft: https://github.com/googlei18n/ufo2ft
        Glyphs 2.3 Handbook July 2016, p184

    Args:
        filter_str: a string of glyphs app filter, e.g. "Name;arg;key:value"
        is_pre: when True, mark the result as a pre-filter.

    Return:
        A dictionary containing the structured filter.
        Return None if parsing failed.
    """
    elements = filter_str.split(";")
    if elements[0] == "":
        logger.error(
            "Failed to parse glyphs filter, expecting a filter name: %s",
            filter_str,
        )
        return None
    result = {"name": elements[0]}
    arguments = elements[1:]
    for idx, elem in enumerate(arguments):
        if not elem:
            # Skip empty arguments.
            continue
        if ":" in elem:
            # Key/value pair.
            key, value = elem.split(":", 1)
            if key.lower() in ["include", "exclude"]:
                # include/exclude are only honored as the last argument.
                if idx != len(arguments) - 1:
                    logger.error(
                        "{} can only present as the last argument in the filter. "
                        "{} is ignored.".format(key, elem)
                    )
                    continue
                result[key.lower()] = re.split("[ ,]+", value)
            else:
                result.setdefault("kwargs", {})[key] = cast_to_number_or_bool(value)
        else:
            # Positional argument.
            result.setdefault("args", []).append(cast_to_number_or_bool(elem))
    if is_pre:
        result["pre"] = True
    return result
def cmd_gimbal_status(self, args):
    '''show gimbal status'''
    # Print the most recently received GIMBAL_REPORT, if any.
    messages = self.master.messages
    if 'GIMBAL_REPORT' in messages:
        print(messages['GIMBAL_REPORT'])
    else:
        print("No GIMBAL_REPORT messages")
def validate(cert, ca_name, crl_file):
    '''.. versionadded:: Neon

    Validate a certificate against a given CA/CRL.

    cert
        path to the certifiate PEM file or string
    ca_name
        name of the CA
    crl_file
        full path to the CRL file

    Returns a dict with a ``valid`` flag and, on failure, ``error`` and
    ``error_cert`` entries.
    '''
    store = OpenSSL.crypto.X509Store()
    cert_obj = _read_cert(cert)
    if cert_obj is None:
        raise CommandExecutionError(
            'Failed to read cert from {0}, see log for details'.format(cert)
        )
    ca_dir = '{0}/{1}'.format(cert_base_path(), ca_name)
    ca_cert = _read_cert('{0}/{1}_ca_cert.crt'.format(ca_dir, ca_name))
    store.add_cert(ca_cert)
    # These flags tell OpenSSL to CRL-check the leaf as well as the
    # entire cert chain.
    flags = OpenSSL.crypto.X509StoreFlags
    store.set_flags(flags.CRL_CHECK | flags.CRL_CHECK_ALL)
    if crl_file is None:
        # No CRL supplied: use an empty one so the CRL_CHECK flags still work.
        crl = OpenSSL.crypto.CRL()
    else:
        with salt.utils.files.fopen(crl_file) as fhr:
            crl = OpenSSL.crypto.load_crl(OpenSSL.crypto.FILETYPE_PEM, fhr.read())
    store.add_crl(crl)
    context = OpenSSL.crypto.X509StoreContext(store, cert_obj)
    ret = {}
    try:
        context.verify_certificate()
        ret['valid'] = True
    except OpenSSL.crypto.X509StoreContextError as err:
        ret['error'] = str(err)
        ret['error_cert'] = err.certificate
        ret['valid'] = False
    return ret
def wait_for_port ( host , port = 22 , timeout = 900 , gateway = None ) :
'''Wait until a connection to the specified port can be made on a specified
host . This is usually port 22 ( for SSH ) , but in the case of Windows
installations , it might be port 445 ( for psexec ) . It may also be an
alternate port for SSH , depending on the base image .''' | start = time . time ( )
# Assign test ports because if a gateway is defined
# we first want to test the gateway before the host .
test_ssh_host = host
test_ssh_port = port
if gateway :
# Gateway spec may embed the port as 'host:port'; an explicit
# 'ssh_gateway_port' entry overrides the embedded one.
ssh_gateway = gateway [ 'ssh_gateway' ]
ssh_gateway_port = 22
if ':' in ssh_gateway :
ssh_gateway , ssh_gateway_port = ssh_gateway . split ( ':' )
if 'ssh_gateway_port' in gateway :
ssh_gateway_port = gateway [ 'ssh_gateway_port' ]
test_ssh_host = ssh_gateway
test_ssh_port = ssh_gateway_port
log . debug ( 'Attempting connection to host %s on port %s ' 'via gateway %s on port %s' , host , port , ssh_gateway , ssh_gateway_port )
else :
log . debug ( 'Attempting connection to host %s on port %s' , host , port )
trycount = 0
# Phase 1: raw TCP probe of the target (or the gateway, when one is set),
# retrying once per second until connect succeeds or `timeout` elapses.
while True :
trycount += 1
try :
# inet_pton raises on a non-IPv6 literal; fall back to IPv4 below.
if socket . inet_pton ( socket . AF_INET6 , host ) :
sock = socket . socket ( socket . AF_INET6 , socket . SOCK_STREAM )
else :
sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM )
except socket . error :
sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM )
try :
sock . settimeout ( 5 )
sock . connect ( ( test_ssh_host , int ( test_ssh_port ) ) )
# Stop any remaining reads / writes on the socket
sock . shutdown ( socket . SHUT_RDWR )
# Close it !
sock . close ( )
break
except socket . error as exc :
log . debug ( 'Caught exception in wait_for_port: %s' , exc )
time . sleep ( 1 )
if time . time ( ) - start > timeout :
log . error ( 'Port connection timed out: %s' , timeout )
return False
log . debug ( 'Retrying connection to %s %s on port %s (try %s)' , 'gateway' if gateway else 'host' , test_ssh_host , test_ssh_port , trycount )
if not gateway :
return True
# Let the user know that his gateway is good !
log . debug ( 'Gateway %s on port %s is reachable.' , test_ssh_host , test_ssh_port )
# Phase 2: the gateway answered; now we need to test the host via the
# gateway . We will use netcat on the gateway to test the port .
ssh_args = [ ]
ssh_args . extend ( [ # Don ' t add new hosts to the host key database
'-oStrictHostKeyChecking=no' , # Set hosts key database path to / dev / null , i . e . , non - existing
'-oUserKnownHostsFile=/dev/null' , # Don ' t re - use the SSH connection . Less failures .
'-oControlPath=none' ] )
# There should never be both a password and an ssh key passed in , so
# key-based options are only added when 'ssh_gateway_key' is present .
if 'ssh_gateway_key' in gateway :
ssh_args . extend ( [ # tell SSH to skip password authentication
'-oPasswordAuthentication=no' , '-oChallengeResponseAuthentication=no' , # Make sure public key authentication is enabled
'-oPubkeyAuthentication=yes' , # do only use the provided identity file
'-oIdentitiesOnly=yes' , # No Keyboard interaction !
'-oKbdInteractiveAuthentication=no' , # Also , specify the location of the key file
'-i {0}' . format ( gateway [ 'ssh_gateway_key' ] ) ] )
# Netcat command testing remote port
command = 'nc -z -w5 -q0 {0} {1}' . format ( host , port )
# SSH command : pcmd ( running 'date' ) verifies login works at all ;
# cmd runs the netcat probe against the real target .
pcmd = 'ssh {0} {1}@{2} -p {3} {4}' . format ( ' ' . join ( ssh_args ) , gateway [ 'ssh_gateway_user' ] , ssh_gateway , ssh_gateway_port , pipes . quote ( 'date' ) )
cmd = 'ssh {0} {1}@{2} -p {3} {4}' . format ( ' ' . join ( ssh_args ) , gateway [ 'ssh_gateway_user' ] , ssh_gateway , ssh_gateway_port , pipes . quote ( command ) )
log . debug ( 'SSH command: \'%s\'' , cmd )
kwargs = { 'display_ssh_output' : False , 'password' : gateway . get ( 'ssh_gateway_password' , None ) }
trycount = 0
usable_gateway = False
gateway_retries = 5
while True :
trycount += 1
# test gateway usage first : up to 5 login attempts before giving up .
if not usable_gateway :
pstatus = _exec_ssh_cmd ( pcmd , allow_failure = True , ** kwargs )
if pstatus == 0 :
usable_gateway = True
else :
gateway_retries -= 1
log . error ( 'Gateway usage seems to be broken, ' 'password error ? Tries left: %s' , gateway_retries )
if not gateway_retries :
raise SaltCloudExecutionFailure ( 'SSH gateway is reachable but we can not login' )
# then try to reach out the target through the now-verified gateway .
if usable_gateway :
status = _exec_ssh_cmd ( cmd , allow_failure = True , ** kwargs )
# Get the exit code of the SSH command .
# If 0 then the port is open .
if status == 0 :
return True
time . sleep ( 1 )
# Note : the shared `timeout` covers both phases combined .
if time . time ( ) - start > timeout :
log . error ( 'Port connection timed out: %s' , timeout )
return False
log . debug ( 'Retrying connection to host %s on port %s ' 'via gateway %s on port %s. (try %s)' , host , port , ssh_gateway , ssh_gateway_port , trycount )
def import_reference(self, refobj):
    """Import the reference of the given refobj.

    Here we assume that the reference is already in the scene and we break
    the encapsulation and pull the data from the reference into the
    current scene.

    This will call :meth:`ReftypeInterface.import_reference` and set the
    reference on the refobj to None.

    :param refobj: the refobj with a reference
    :type refobj: refobj
    :returns: None
    :rtype: None
    :raises: None
    """
    interface = self.get_typ_interface(self.get_typ(refobj))
    interface.import_reference(refobj, self.get_reference(refobj))
    # Clear the stored reference now that the data lives in the scene.
    self.set_reference(refobj, None)
def set_boot_device(self, device, persistent=False):
    """Set the boot device for the node.

    Set the boot device to use on next reboot of the node.

    :param device: the boot device, one of
        :mod:`ironic.common.boot_devices`.
    :param persistent: Boolean value. True if the boot device will
        persist to all future boots, False if not.
        Default: False. Ignored by this driver.
    :raises: UcsOperationError if UCS Manager reports any error.
    """
    manager = self.sp_manager
    try:
        manager.create_boot_policy()
        manager.set_boot_device(device)
    except UcsException as ex:
        # Wrap the UCS SDK error in the driver's own exception type.
        raise exception.UcsOperationError(operation="set_boot_device", error=ex)
def discover_roku():
    """Search LAN for available Roku devices. Returns a Roku object.

    Prints every device found; returns None when discovery finds nothing.
    When several devices are found, the user is prompted to pick one by
    index.
    """
    print("Searching for Roku devices within LAN ...")
    rokus = Roku.discover()
    if not rokus:
        print("Unable to discover Roku devices. " +
              "Try again, or manually specify the IP address with " +
              "\'roku <ipaddr>\' (e.g. roku 192.168.1.130)")
        return None
    print("Found the following Roku devices:")
    for index, roku in enumerate(rokus, start=1):
        # dinfo = ' '.join(re.split(',|:', str(roku.device_info))[1:3])
        dinfo = ''
        print("[" + str(index) + "] " + str(roku.host) + ":" +
              str(roku.port) + ' (' + dinfo + ')')
    print("")
    if len(rokus) == 1:
        print("Selecting Roku 1 by default")
        return rokus[0]
    print("Multiple Rokus found. Select the index of the Roku to control:")
    while True:
        try:
            query = "Select (1 to " + str(len(rokus)) + ") > "
            sel = int(input(query)) - 1
            if sel >= len(rokus):
                # Out of range counts as an invalid selection too.
                raise ValueError
            break
        except ValueError:
            print("Invalid selection")
    return rokus[sel]
def patch_node(self, name, body, **kwargs):  # noqa: E501
    """patch_node  # noqa: E501

    partially update the specified Node  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.patch_node(name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Node (required)
    :param UNKNOWN_BASE_TYPE body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1Node
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async and sync branches make the identical call; with
    # async_req set the helper returns the request thread, otherwise the
    # unwrapped response data.
    return self.patch_node_with_http_info(name, body, **kwargs)  # noqa: E501
def process_bucket_iterator ( bid , prefix = "" , delimiter = "" , ** continuation ) :
"""Bucket pagination .

Iterate the keys of bucket ``bid`` ( ``account:bucket`` ) page by page ,
dispatching each non - empty page to ``process_keyset`` and recording
page - count / timing stats in redis every 10 pages , with a final flush
for any remainder .
""" | log . info ( "Iterating keys bucket %s prefix %s delimiter %s" , bid , prefix , delimiter )
account , bucket = bid . split ( ':' , 1 )
# Region / versioning / credentials for this bucket are cached in redis hashes .
region = connection . hget ( 'bucket-regions' , bid )
versioned = bool ( int ( connection . hget ( 'bucket-versions' , bid ) ) )
session = get_session ( json . loads ( connection . hget ( 'bucket-accounts' , account ) ) )
s3 = session . client ( 's3' , region_name = region , config = s3config )
# Versioned buckets list object versions ; plain buckets list objects .
( contents_key , contents_method , _ ) = BUCKET_OBJ_DESC [ versioned ]
params = dict ( Bucket = bucket )
if prefix :
params [ 'Prefix' ] = prefix
if delimiter :
params [ 'Delimiter' ] = delimiter
if continuation :
# Continuation kwargs apparently carry a 4 - char prefix that is
# stripped here ( k [ 4 : ] ) -- NOTE ( review ) : confirm the expected
# prefix against the callers that re - enqueue this task .
params . update ( { k [ 4 : ] : v for k , v in continuation . items ( ) } )
paginator = s3 . get_paginator ( contents_method ) . paginate ( ** params )
with bucket_ops ( bid , 'page' ) :
ptime = time . time ( )
pcounter = 0
for page in paginator :
page = page_strip ( page , versioned )
pcounter += 1
if page :
# Hand off each non - empty page for key processing .
invoke ( process_keyset , bid , page )
# Flush page - count and timing stats every 10 pages .
if pcounter % 10 == 0 :
with connection . pipeline ( ) as p :
nptime = time . time ( )
p . hincrby ( 'bucket-pages' , bid , 1 )
p . hincrby ( 'bucket-pages-time' , bid , int ( nptime - ptime ) )
ptime = nptime
p . execute ( )
# Final flush for any pages not covered by the modulo - 10 flush above .
if pcounter % 10 :
with connection . pipeline ( ) as p :
nptime = time . time ( )
p . hincrby ( 'bucket-pages' , bid , 1 )
p . hincrby ( 'bucket-pages-time' , bid , int ( nptime - ptime ) )
p . execute ( )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.