signature
stringlengths 29
44.1k
| implementation
stringlengths 0
85.2k
|
|---|---|
def _zforce ( self , R , z , phi = 0. , t = 0. ) :
"""NAME :
zforce
PURPOSE :
evaluate vertical force K _ z ( R , z )
INPUT :
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT :
K _ z ( R , z )
HISTORY :
2012-12-27 - Written - Bovy ( IAS )"""
|
if self . _new : # if R > 6 . : return self . _ kp ( R , z )
if nu . fabs ( z ) < 10. ** - 6. :
return 0.
kalphamax1 = R
ks1 = kalphamax1 * 0.5 * ( self . _glx + 1. )
weights1 = kalphamax1 * self . _glw
sqrtp = nu . sqrt ( z ** 2. + ( ks1 + R ) ** 2. )
sqrtm = nu . sqrt ( z ** 2. + ( ks1 - R ) ** 2. )
evalInt1 = ks1 ** 2. * special . k0 ( ks1 * self . _alpha ) * ( 1. / sqrtp + 1. / sqrtm ) / nu . sqrt ( R ** 2. + z ** 2. - ks1 ** 2. + sqrtp * sqrtm ) / ( sqrtp + sqrtm )
if R < 10. :
kalphamax2 = 10.
ks2 = ( kalphamax2 - kalphamax1 ) * 0.5 * ( self . _glx + 1. ) + kalphamax1
weights2 = ( kalphamax2 - kalphamax1 ) * self . _glw
sqrtp = nu . sqrt ( z ** 2. + ( ks2 + R ) ** 2. )
sqrtm = nu . sqrt ( z ** 2. + ( ks2 - R ) ** 2. )
evalInt2 = ks2 ** 2. * special . k0 ( ks2 * self . _alpha ) * ( 1. / sqrtp + 1. / sqrtm ) / nu . sqrt ( R ** 2. + z ** 2. - ks2 ** 2. + sqrtp * sqrtm ) / ( sqrtp + sqrtm )
return - z * 2. * nu . sqrt ( 2. ) * self . _alpha * nu . sum ( weights1 * evalInt1 + weights2 * evalInt2 )
else :
return - z * 2. * nu . sqrt ( 2. ) * self . _alpha * nu . sum ( weights1 * evalInt1 )
raise NotImplementedError ( "Not new=True not implemented for RazorThinExponentialDiskPotential" )
|
def list(self, *args, **kwargs):
    """List networks. Similar to the ``docker networks ls`` command.

    Args:
        names (:py:class:`list`): List of names to filter by.
        ids (:py:class:`list`): List of ids to filter by.
        filters (dict): Filters to be processed on the network list.
            Available filters:
            - ``driver=[<driver-name>]`` Matches a network's driver.
            - ``label=[<key>]`` or ``label=[<key>=<value>]``.
            - ``type=["custom"|"builtin"]`` Filters networks by type.
        greedy (bool): Fetch more details for each network individually.
            You might want this to get the containers attached to them.

    Returns:
        (list of :py:class:`Network`) The networks on the server.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    fetch_details = kwargs.pop('greedy', False)
    raw = self.client.api.networks(*args, **kwargs)
    models = [self.prepare_model(entry) for entry in raw]
    # Per-network detail (e.g. attached containers) is only available via
    # individual inspects, and only on API versions >= 1.28.
    if fetch_details and version_gte(self.client.api._version, '1.28'):
        for model in models:
            model.reload()
    return models
|
def generate_host_passthrough(self, vcpu_num):
    """Generate a host-passthrough XML ``cpu`` node.

    Args:
        vcpu_num (str): number of virtual CPUs

    Returns:
        lxml.etree.Element: CPU XML node
    """
    node = ET.Element('cpu', mode='host-passthrough')
    node.append(self.generate_topology(vcpu_num))
    # A NUMA layout is only meaningful with more than one vCPU.
    if vcpu_num > 1:
        node.append(self.generate_numa(vcpu_num))
    return node
|
def ExamineEvent(self, mediator, event):
    """Analyzes an event and creates Windows Services as required.

    At present, this method only handles events extracted from the Registry.

    Args:
        mediator (AnalysisMediator): mediates interactions between analysis
            plugins and other components, such as storage and dfvfs.
        event (EventObject): event to examine.
    """
    # TODO: Handle event log entries here also (ie, event id 4697).
    data_type = getattr(event, 'data_type', '')
    # Only Windows Registry service entries are understood for now.
    if data_type != 'windows:registry:service':
        return
    # Create and store the service.
    self._service_collection.AddService(WindowsService.FromEvent(event))
|
def pipe_numberinput(context=None, _INPUT=None, conf=None, **kwargs):
    """An input that prompts the user for a number and yields it forever.

    Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : not used
    conf : {
        'name': {'value': 'parameter name'},
        'prompt': {'value': 'User prompt'},
        'default': {'value': 'default value'},
        'debug': {'value': 'debug value'}
    }

    Yields
    ------
    _OUTPUT : int
    """
    value = utils.get_input(context, conf)
    try:
        value = int(value)
    except (TypeError, ValueError):
        # Non-numeric (or None) input falls back to 0.  The previous bare
        # ``except:`` also swallowed unrelated errors such as
        # KeyboardInterrupt; int() can only raise TypeError/ValueError.
        value = 0
    while True:
        yield value
|
def superscript(self):
    """Tri-state superscript flag.

    |True| when ``w:vertAlign/@w:val`` is 'superscript', |False| when it
    holds any other value, |None| when ``w:vertAlign`` is not present.
    """
    state = self.vertAlign
    if state is None:
        return None
    return True if state.val == ST_VerticalAlignRun.SUPERSCRIPT else False
|
def from_array(array):
    """Deserialize a new EncryptedPassportElement from a given dictionary.

    :return: new EncryptedPassportElement instance.
    :rtype: EncryptedPassportElement
    """
    # Covers both None and an empty mapping.
    if not array:
        return None
    assert_type_or_raise(array, dict, parameter_name="array")
    from pytgbot.api_types.receivable.passport import PassportFile

    def optional(key, convert):
        # Apply ``convert`` only when the key is present and non-None.
        raw = array.get(key)
        return convert(raw) if raw is not None else None

    as_file_list = lambda value: PassportFile.from_array_list(value, list_level=1)
    data = {
        'type': u(array.get('type')),
        'hash': u(array.get('hash')),
        'data': optional('data', u),
        'phone_number': optional('phone_number', u),
        'email': optional('email', u),
        'files': optional('files', as_file_list),
        'front_side': optional('front_side', PassportFile.from_array),
        'reverse_side': optional('reverse_side', PassportFile.from_array),
        'selfie': optional('selfie', PassportFile.from_array),
        'translation': optional('translation', as_file_list),
        '_raw': array,
    }
    return EncryptedPassportElement(**data)
|
def wrapped_request(self, request, *args, **kwargs):
    """Create and send a request to the server.

    This method implements a very small subset of the options possible to
    send a request. It is provided as a shortcut to sending a simple
    wrapped request.

    Parameters
    ----------
    request : str
        The request to call.
    *args : list of objects
        Arguments to pass on to the request.

    Keyword Arguments
    -----------------
    timeout : float or None, optional
        Timeout after this amount of seconds (keyword argument).
    mid : None or int, optional
        Message identifier to use for the request message. If None, use
        either an auto-incrementing value or no mid depending on the KATCP
        protocol version (mid's were only introduced with KATCP v5) and the
        value of the `use_mid` argument. Defaults to None.
    use_mid : bool
        Use a mid for the request if True.

    Returns
    -------
    Future that resolves with the
    :meth:`katcp.client.DeviceClient.future_request` response wrapped in
    self.reply_wrapper.

    Example
    -------
    wrapped_reply = yield ic.simple_request('help', 'sensor-list')
    """
    failed = tornado_Future()
    try:
        use_mid = kwargs.get('use_mid')
        timeout = kwargs.get('timeout')
        mid = kwargs.get('mid')
        msg = Message.request(request, *args, mid=mid)
    except Exception:
        # Message construction failed: resolve the returned future with
        # the current exception instead of raising synchronously.
        failed.set_exc_info(sys.exc_info())
        return failed
    return transform_future(self.reply_wrapper,
                            self.katcp_client.future_request(msg, timeout, use_mid))
|
def to_dict(self):
    '''Serialize this wait_time condition into a dictionary.'''
    payload = super(WaitTime, self).to_dict()
    payload['condition'] = {'waitTime': {'waitTime': self.wait_time}}
    return payload
|
def remove_out_of_bounds_bins(df, chromosome_size):
    # type: (pd.DataFrame, int) -> pd.DataFrame
    """Remove all reads that were shifted outside of the genome endpoints."""
    # An empty dataframe has no "Bin" column and thus nothing to trim.
    if "Bin" not in df:
        return df
    # Drop bins beyond the chromosome end, then bins before position 0.
    within_end = df.drop(df[df.Bin > chromosome_size].index)
    return within_end.drop(within_end[within_end.Bin < 0].index)
|
def reverse_query(self):
    '''Changes the coordinates as if the query sequence has been reverse
    complemented (both endpoints re-measured from the opposite end; the
    start/end order is deliberately left unchanged to preserve strand
    information).'''
    self.qry_start, self.qry_end = (
        self.qry_length - self.qry_start - 1,
        self.qry_length - self.qry_end - 1,
    )
|
def to_si(self, values, from_unit):
    """Return values in SI and the unit to which the values were converted."""
    # Already an SI unit: nothing to convert.
    if from_unit in self.si_units:
        return values, from_unit
    # 'ton' normalizes to the metric tonne; every other unit goes to kg.
    target = 'tonne' if from_unit == 'ton' else 'kg'
    return self.to_unit(values, target, from_unit), target
|
def create_selection():
    """Create a selection expression.

    Builds the pyparsing grammar for the attribute list of a SELECT-style
    query: "*", "count(*)", or a comma-separated list of (possibly
    arithmetic, possibly aliased) expressions.  Returns the combined
    parser element with results name "attrs".
    """
    # Forward declarations let the arithmetic grammar refer to itself.
    operation = Forward()
    nested = Group(Suppress("(") + operation + Suppress(")")).setResultsName("nested")
    select_expr = Forward()
    # Function calls may themselves contain full select expressions.
    functions = select_functions(select_expr)
    # A term is a function call, a parenthesized expression, or a bare value.
    maybe_nested = functions | nested | Group(var_val)
    # Binary arithmetic: one term followed by one or more operator+term pairs.
    operation <<= maybe_nested + OneOrMore(oneOf("+ - * /") + maybe_nested)
    select_expr <<= operation | maybe_nested
    # Optional "AS <name>" alias attached to each selected expression.
    alias = Group(Suppress(upkey("as")) + var).setResultsName("alias")
    full_select = Group(Group(select_expr).setResultsName("selection") + Optional(alias))
    # The complete attrs clause: "*", "count(*)", or a delimited list.
    return Group(Keyword("*") | upkey("count(*)") | delimitedList(full_select)).setResultsName("attrs")
|
def sp_rand(m, n, a):
    """Generates an mxn sparse 'd' matrix with round(a*m*n) nonzeros."""
    # Degenerate shape: return an empty sparse matrix of that shape.
    if m == 0 or n == 0:
        return spmatrix([], [], [], (m, n))
    # Clamp the requested nonzero count to the valid range [0, m*n].
    nnz = min(max(0, int(round(a * m * n))), m * n)
    # Sample nnz distinct linear (column-major) indices into the m x n grid.
    nz = matrix(random.sample(range(m * n), nnz), tc='i')
    # Row index = linear index mod m; column index = linear index // m
    # (int(ii) truncates the float division result).  Values are drawn
    # from a standard normal.  NOTE(review): assumes cvxopt semantics for
    # elementwise `nz % m` and `nz / m` on an integer matrix — confirm.
    return spmatrix(normal(nnz, 1), nz % m, matrix([int(ii) for ii in nz / m]), (m, n))
|
def init_UI(self):
    """Set display variables (font, resolution of GUI, sizer proportions),
    then build the Side bar panel, Top bar panel, and Plots scrolling
    panel, which are then placed together in a sizer and fit to the GUI
    wx.Frame."""
    # --- Setup ScrolledPanel Ctrls ---
    # Set ctrl size and style variables.  GUI_RESOLUTION scales all widget
    # sizes relative to the display width (capped at 1, so never upscaled).
    dw, dh = wx.DisplaySize()
    r1 = dw / 1210.
    r2 = dw / 640.
    self.GUI_RESOLUTION = min(r1, r2, 1)
    top_bar_2v_space = 5
    top_bar_h_space = 10
    spec_button_space = 10
    side_bar_v_space = 10
    # Set font size and style; Windows renders fonts slightly larger, so
    # shrink everything by one point there.
    FONT_WEIGHT = 1
    if sys.platform.startswith('win'):
        FONT_WEIGHT = -1
    font1 = wx.Font(9 + FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type)
    font2 = wx.Font(12 + FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type)
    font = wx.SystemSettings.GetFont(wx.SYS_SYSTEM_FONT)
    font.SetPointSize(10 + FONT_WEIGHT)
    # --- Create Figures and FigCanvas objects ---
    # fig1/canvas1: Zijderveld (orthogonal projection) plot.
    self.fig1 = Figure((5. * self.GUI_RESOLUTION, 5. * self.GUI_RESOLUTION), dpi=self.dpi)
    self.canvas1 = FigCanvas(self.scrolled_panel, -1, self.fig1)
    self.toolbar1 = NavigationToolbar(self.canvas1)
    self.toolbar1.Hide()
    self.zijderveld_setting = "Zoom"
    self.toolbar1.zoom()
    self.canvas1.Bind(wx.EVT_RIGHT_DOWN, self.right_click_zijderveld)
    self.canvas1.Bind(wx.EVT_MIDDLE_DOWN, self.home_zijderveld)
    self.canvas1.Bind(wx.EVT_LEFT_DCLICK, self.on_zijd_select)
    self.canvas1.Bind(wx.EVT_RIGHT_DCLICK, self.on_zijd_mark)
    self.canvas1.Bind(wx.EVT_MOTION, self.on_change_zijd_mouse_cursor)
    self.canvas1.SetHelpText(dgh.zij_help)
    # fig2/canvas2: specimen equal-area projection.
    self.fig2 = Figure((2.5 * self.GUI_RESOLUTION, 2.5 * self.GUI_RESOLUTION), dpi=self.dpi)
    self.specimen_eqarea = self.fig2.add_subplot(111)
    draw_net(self.specimen_eqarea)
    self.canvas2 = FigCanvas(self.scrolled_panel, -1, self.fig2)
    self.toolbar2 = NavigationToolbar(self.canvas2)
    self.toolbar2.Hide()
    self.toolbar2.zoom()
    self.specimen_EA_setting = "Zoom"
    self.canvas2.Bind(wx.EVT_LEFT_DCLICK, self.on_equalarea_specimen_select)
    self.canvas2.Bind(wx.EVT_RIGHT_DOWN, self.right_click_specimen_equalarea)
    self.canvas2.Bind(wx.EVT_MOTION, self.on_change_specimen_mouse_cursor)
    self.canvas2.Bind(wx.EVT_MIDDLE_DOWN, self.home_specimen_equalarea)
    self.canvas2.SetHelpText(dgh.spec_eqarea_help)
    self.specimen_EA_xdata = []
    self.specimen_EA_ydata = []
    # fig3/canvas3: M/M0 demagnetization plot.
    self.fig3 = Figure((2.5 * self.GUI_RESOLUTION, 2.5 * self.GUI_RESOLUTION), dpi=self.dpi)
    self.mplot = self.fig3.add_axes([0.2, 0.15, 0.7, 0.7], frameon=True, facecolor='None')
    self.canvas3 = FigCanvas(self.scrolled_panel, -1, self.fig3)
    self.toolbar3 = NavigationToolbar(self.canvas3)
    self.toolbar3.Hide()
    self.toolbar3.zoom()
    self.MM0_setting = "Zoom"
    self.canvas3.Bind(wx.EVT_RIGHT_DOWN, self.right_click_MM0)
    self.canvas3.Bind(wx.EVT_MIDDLE_DOWN, self.home_MM0)
    self.canvas3.SetHelpText(dgh.MM0_help)
    # fig4/canvas4: high-level (sample/site/location/study) equal-area plot.
    self.fig4 = Figure((2.5 * self.GUI_RESOLUTION, 2.5 * self.GUI_RESOLUTION), dpi=self.dpi)
    self.canvas4 = FigCanvas(self.scrolled_panel, -1, self.fig4)
    self.toolbar4 = NavigationToolbar(self.canvas4)
    self.toolbar4.Hide()
    self.toolbar4.zoom()
    self.high_EA_setting = "Zoom"
    self.canvas4.Bind(wx.EVT_LEFT_DCLICK, self.on_equalarea_high_select)
    self.canvas4.Bind(wx.EVT_RIGHT_DOWN, self.right_click_high_equalarea)
    self.canvas4.Bind(wx.EVT_MOTION, self.on_change_high_mouse_cursor)
    self.canvas4.Bind(wx.EVT_MIDDLE_DOWN, self.home_high_equalarea)
    self.canvas4.SetHelpText(dgh.high_level_eqarea_help)
    self.old_pos = None
    self.high_EA_xdata = []
    self.high_EA_ydata = []
    self.high_level_eqarea = self.fig4.add_subplot(111)
    draw_net(self.high_level_eqarea)
    # High level Stats Sizer and Switch Stats Button.
    # NOTE(review): exec() builds one read-only TextCtrl per statistic
    # name, bound to attributes self.<parameter>_window /
    # self.<parameter>_outer_window; `parameter` and `font2` are picked up
    # from the local scope by exec.
    self.stats_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "mean statistics"), wx.VERTICAL)
    for parameter in ['mean_type', 'dec', 'inc', 'alpha95', 'K', 'R', 'n_lines', 'n_planes']:
        COMMAND = "self.%s_window=wx.TextCtrl(self.scrolled_panel,style=wx.TE_CENTER|wx.TE_READONLY,size=(50*self.GUI_RESOLUTION,25))" % parameter
        exec(COMMAND)
        COMMAND = "self.%s_window.SetBackgroundColour(wx.WHITE)" % parameter
        exec(COMMAND)
        COMMAND = "self.%s_window.SetFont(font2)" % parameter
        exec(COMMAND)
        COMMAND = "self.%s_outer_window = wx.GridSizer(1,2,5*self.GUI_RESOLUTION,15*self.GUI_RESOLUTION)" % parameter
        exec(COMMAND)
        COMMAND = """self.%s_outer_window.AddMany([
            (wx.StaticText(self.scrolled_panel,label='%s',style=wx.TE_CENTER),1,wx.EXPAND),
            (self.%s_window, 1, wx.EXPAND)])""" % (parameter, parameter, parameter)
        exec(COMMAND)
        COMMAND = "self.stats_sizer.Add(self.%s_outer_window, 1, wx.ALIGN_LEFT|wx.EXPAND)" % parameter
        exec(COMMAND)
    self.switch_stats_button = wx.SpinButton(self.scrolled_panel, id=wx.ID_ANY, style=wx.SP_HORIZONTAL | wx.SP_ARROW_KEYS | wx.SP_WRAP, name="change stats")
    self.Bind(wx.EVT_SPIN, self.on_select_stats_button, self.switch_stats_button)
    self.switch_stats_button.SetHelpText(dgh.switch_stats_btn_help)
    # --- Side Bar Options and Logger ---
    # Create the list control that presents the measurement steps.
    self.logger = wx.ListCtrl(self.side_panel, id=wx.ID_ANY, size=(100 * self.GUI_RESOLUTION, 100 * self.GUI_RESOLUTION), style=wx.LC_REPORT)
    self.logger.SetFont(font1)
    self.logger.InsertColumn(0, 'i', width=25 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(1, 'Step', width=25 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(2, 'Tr', width=35 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(3, 'Dec', width=35 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(4, 'Inc', width=35 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(5, 'M', width=45 * self.GUI_RESOLUTION)
    self.logger.InsertColumn(6, 'csd', width=45 * self.GUI_RESOLUTION)
    self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.on_click_listctrl, self.logger)
    self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK, self.on_right_click_listctrl, self.logger)
    self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_select_measurement, self.logger)
    self.logger.SetHelpText(dgh.logger_help)
    # Select-specimen combo-box with the list of specimens.
    self.specimens_box = wx.ComboBox(self.side_panel, id=wx.ID_ANY, value=self.s, size=(200 * self.GUI_RESOLUTION, 25), choices=self.specimens, style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER, name="specimen")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_specimen, self.specimens_box)
    self.Bind(wx.EVT_TEXT_ENTER, self.on_enter_specimen, self.specimens_box)
    self.specimens_box.SetHelpText(dgh.specimens_box_help)
    # Buttons to move forward and backwards through specimens.
    self.nextbutton = wx.Button(self.side_panel, id=wx.ID_ANY, label='next', size=(100 * self.GUI_RESOLUTION, 25))
    self.Bind(wx.EVT_BUTTON, self.on_next_button, self.nextbutton)
    self.nextbutton.SetFont(font2)
    self.nextbutton.SetHelpText(dgh.nextbutton_help)
    self.prevbutton = wx.Button(self.side_panel, id=wx.ID_ANY, label='previous', size=(100 * self.GUI_RESOLUTION, 25))
    self.prevbutton.SetFont(font2)
    self.Bind(wx.EVT_BUTTON, self.on_prev_button, self.prevbutton)
    self.prevbutton.SetHelpText(dgh.prevbutton_help)
    # Select-coordinate box.
    self.COORDINATE_SYSTEM, self.coordinate_list = self.get_coordinate_system()
    self.coordinates_box = wx.ComboBox(self.side_panel, id=wx.ID_ANY, size=(200 * self.GUI_RESOLUTION, 25), choices=self.coordinate_list, value=self.COORDINATE_SYSTEM, style=wx.CB_DROPDOWN | wx.TE_READONLY, name="coordinates")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_coordinates, self.coordinates_box)
    self.coordinates_box.SetHelpText(dgh.coordinates_box_help)
    # Orthogonal Zijderveld options box.
    # 'X=best fit line dec' was removed as an option given that it isn't
    # implemented for multiple components.
    self.orthogonal_box = wx.ComboBox(self.side_panel, id=wx.ID_ANY, value='X=East', size=(200 * self.GUI_RESOLUTION, 25), choices=['X=NRM dec', 'X=East', 'X=North'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name="orthogonal_plot")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_orthogonal_box, self.orthogonal_box)
    self.orthogonal_box.SetHelpText(dgh.orthogonal_box_help)
    # --- Top Bar Options ---
    # Select-bounds boxes (choices populated later via self.T_list).
    self.T_list = []
    self.tmin_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), choices=self.T_list, style=wx.CB_DROPDOWN | wx.TE_READONLY)
    self.Bind(wx.EVT_COMBOBOX, self.get_new_PCA_parameters, self.tmin_box)
    self.tmin_box.SetHelpText(dgh.tmin_box_help)
    self.tmax_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), choices=self.T_list, style=wx.CB_DROPDOWN | wx.TE_READONLY)
    self.Bind(wx.EVT_COMBOBOX, self.get_new_PCA_parameters, self.tmax_box)
    self.tmax_box.SetHelpText(dgh.tmax_box_help)
    # Specimen interpretations management box.
    list_fits = []
    self.fit_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), choices=list_fits, style=wx.TE_PROCESS_ENTER)
    self.Bind(wx.EVT_COMBOBOX, self.on_select_fit, self.fit_box)
    self.Bind(wx.EVT_TEXT_ENTER, self.on_enter_fit_name, self.fit_box)
    self.fit_box.SetHelpText(dgh.fit_box_help)
    self.add_fit_button = wx.Button(self.panel, id=wx.ID_ANY, label='add fit', size=(50 * self.GUI_RESOLUTION, 25))
    self.add_fit_button.SetFont(font2)
    self.Bind(wx.EVT_BUTTON, self.on_btn_add_fit, self.add_fit_button)
    self.add_fit_button.SetHelpText(dgh.add_fit_button_help)
    # Save / delete interpretation buttons.
    self.save_fit_button = wx.Button(self.panel, id=wx.ID_ANY, label='save', size=(50 * self.GUI_RESOLUTION, 25))
    self.save_fit_button.SetFont(font2)
    self.save_fit_button.SetHelpText(dgh.save_fit_btn_help)
    self.delete_fit_button = wx.Button(self.panel, id=wx.ID_ANY, label='delete fit', size=(50 * self.GUI_RESOLUTION, 25))
    self.delete_fit_button.SetFont(font2)
    self.delete_fit_button.SetHelpText(dgh.delete_fit_btn_help)
    self.Bind(wx.EVT_BUTTON, self.on_save_interpretation_button, self.save_fit_button)
    self.Bind(wx.EVT_BUTTON, self.on_btn_delete_fit, self.delete_fit_button)
    # Auto-save interpretation controls.
    self.auto_save = wx.CheckBox(self.panel, wx.ID_ANY, 'auto-save')
    if self.preferences['auto_save']:
        self.auto_save.SetValue(True)
    self.auto_save_info = wx.Button(self.panel, wx.ID_ANY, "?")
    self.Bind(wx.EVT_BUTTON, self.on_btn_info_click, self.auto_save_info)
    # Interpretation type and display window.
    self.PCA_type_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), value='line', choices=['line', 'line-anchored', 'line-with-origin', 'plane', 'Fisher'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name="coordinates")
    self.Bind(wx.EVT_COMBOBOX, self.on_select_specimen_mean_type_box, self.PCA_type_box)
    self.PCA_type_box.SetHelpText(dgh.PCA_type_help)
    self.plane_display_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), value='whole plane', choices=['whole plane', 'u. hemisphere', 'l. hemisphere', 'poles', 'bfv', 'wp + bfv'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name="PlaneType")
    self.Bind(wx.EVT_COMBOBOX, self.on_select_plane_display_box, self.plane_display_box)
    self.plane_display_box.SetHelpText(dgh.plane_display_help)
    # Interpretation statistics StaticSizer; again exec() generates the
    # self.s<parameter>_window TextCtrls referenced just below.
    box_sizer_specimen_stat = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Interpretation Direction and Statistics"), wx.HORIZONTAL)
    for parameter in ['dec', 'inc', 'n', 'mad', 'dang', 'alpha95']:
        COMMAND = "self.s%s_window=wx.TextCtrl(self.panel,style=wx.TE_CENTER|wx.TE_READONLY,size=(25*self.GUI_RESOLUTION,25))" % parameter
        exec(COMMAND)
        COMMAND = "self.s%s_window.SetBackgroundColour(wx.WHITE)" % parameter
        exec(COMMAND)
        COMMAND = "self.s%s_window.SetFont(font2)" % parameter
        exec(COMMAND)
    specimen_stat_window = wx.GridSizer(2, 6, 0, 5)
    # First row: column headers; second row: the matching value windows.
    specimen_stat_window.AddMany([
        (wx.StaticText(self.panel, label="dec", style=wx.TE_CENTER), 1, wx.EXPAND | wx.TOP, 2 * top_bar_2v_space),
        (wx.StaticText(self.panel, label="inc", style=wx.TE_CENTER), 1, wx.EXPAND | wx.TOP, 2 * top_bar_2v_space),
        (wx.StaticText(self.panel, label="n", style=wx.TE_CENTER), 1, wx.EXPAND | wx.TOP, 2 * top_bar_2v_space),
        (wx.StaticText(self.panel, label="mad", style=wx.TE_CENTER), 1, wx.EXPAND | wx.TOP, 2 * top_bar_2v_space),
        (wx.StaticText(self.panel, label="dang", style=wx.TE_CENTER), 1, wx.TE_CENTER | wx.EXPAND | wx.TOP, 2 * top_bar_2v_space),
        (wx.StaticText(self.panel, label="a95", style=wx.TE_CENTER), 1, wx.TE_CENTER | wx.EXPAND | wx.TOP, 2 * top_bar_2v_space),
        (self.sdec_window, 1, wx.EXPAND),
        (self.sinc_window, 1, wx.EXPAND),
        (self.sn_window, 1, wx.EXPAND),
        (self.smad_window, 1, wx.EXPAND),
        (self.sdang_window, 1, wx.EXPAND),
        (self.salpha95_window, 1, wx.EXPAND)])
    box_sizer_specimen_stat.Add(specimen_stat_window, 1, wx.ALIGN_LEFT | wx.EXPAND)
    # High-level mean window.
    self.level_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), value='site', choices=['sample', 'site', 'location', 'study'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_level")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_high_level, self.level_box)
    self.level_box.SetHelpText(dgh.level_box_help)
    self.level_names = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), value=self.site, choices=self.sites, style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_level_names")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_level_name, self.level_names)
    self.level_names.SetHelpText(dgh.level_names_help)
    # Mean type boxes.
    self.mean_type_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), value='None', choices=['Fisher', 'Fisher by polarity', 'None'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_type")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_mean_type_box, self.mean_type_box)
    self.mean_type_box.SetHelpText(dgh.mean_type_help)
    self.mean_fit_box = wx.ComboBox(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 25), value='None', choices=['None', 'All'] + list_fits, style=wx.CB_DROPDOWN | wx.TE_READONLY, name="high_type")
    self.Bind(wx.EVT_COMBOBOX, self.onSelect_mean_fit_box, self.mean_fit_box)
    self.mean_fit_box.SetHelpText(dgh.mean_fit_help)
    self.mean_fit = 'None'
    # Warnings TextCtrl.
    warning_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Current Data Warnings"), wx.VERTICAL)
    self.warning_box = wx.TextCtrl(self.panel, id=wx.ID_ANY, size=(50 * self.GUI_RESOLUTION, 50 + 2 * top_bar_2v_space), value="No Problems", style=wx.TE_MULTILINE | wx.TE_READONLY | wx.HSCROLL, name="warning_box")
    self.warning_box.SetHelpText(dgh.warning_help)
    warning_sizer.Add(self.warning_box, 1, wx.TOP | wx.EXPAND)
    # --- Design the panel ---
    # Top bar.
    top_bar_sizer = wx.BoxSizer(wx.HORIZONTAL)
    bounds_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Bounds"), wx.VERTICAL)
    bounds_sizer.AddMany([(self.tmin_box, 1, wx.ALIGN_TOP | wx.EXPAND | wx.BOTTOM, top_bar_2v_space), (self.tmax_box, 1, wx.ALIGN_BOTTOM | wx.EXPAND | wx.TOP, top_bar_2v_space)])
    top_bar_sizer.Add(bounds_sizer, 1, wx.ALIGN_LEFT)
    fit_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Interpretation Options"), wx.VERTICAL)
    fit_grid = wx.GridSizer(2, 3, top_bar_h_space, 2 * top_bar_2v_space)
    fit_grid.AddMany([(self.add_fit_button, 1, wx.ALIGN_TOP | wx.ALIGN_LEFT | wx.EXPAND), (self.save_fit_button, 1, wx.ALIGN_TOP | wx.ALIGN_LEFT | wx.EXPAND), (self.auto_save, 1, wx.ALIGN_CENTER), (self.fit_box, 1, wx.ALIGN_BOTTOM | wx.ALIGN_LEFT | wx.EXPAND), (self.delete_fit_button, 1, wx.ALIGN_BOTTOM | wx.ALIGN_LEFT | wx.EXPAND), (self.auto_save_info, 1, wx.ALIGN_CENTER)])
    fit_sizer.Add(fit_grid, 1, wx.EXPAND)
    top_bar_sizer.Add(fit_sizer, 2, wx.ALIGN_LEFT | wx.LEFT, top_bar_h_space)
    fit_type_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Interpretation Type"), wx.VERTICAL)
    fit_type_sizer.AddMany([(self.PCA_type_box, 1, wx.ALIGN_TOP | wx.EXPAND | wx.BOTTOM, top_bar_2v_space), (self.plane_display_box, 1, wx.ALIGN_BOTTOM | wx.EXPAND | wx.TOP, top_bar_2v_space)])
    top_bar_sizer.Add(fit_type_sizer, 1, wx.ALIGN_LEFT | wx.LEFT, top_bar_h_space)
    top_bar_sizer.Add(box_sizer_specimen_stat, 3, wx.ALIGN_LEFT | wx.LEFT, top_bar_h_space)
    level_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Display Level"), wx.VERTICAL)
    level_sizer.AddMany([(self.level_box, 1, wx.ALIGN_TOP | wx.EXPAND | wx.BOTTOM, top_bar_2v_space), (self.level_names, 1, wx.ALIGN_BOTTOM | wx.EXPAND | wx.TOP, top_bar_2v_space)])
    top_bar_sizer.Add(level_sizer, 1, wx.ALIGN_LEFT | wx.LEFT, top_bar_h_space)
    mean_options_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Mean Options"), wx.VERTICAL)
    mean_options_sizer.AddMany([(self.mean_type_box, 1, wx.ALIGN_TOP | wx.EXPAND | wx.BOTTOM, top_bar_2v_space), (self.mean_fit_box, 1, wx.ALIGN_BOTTOM | wx.EXPAND | wx.TOP, top_bar_2v_space)])
    top_bar_sizer.Add(mean_options_sizer, 1, wx.ALIGN_LEFT | wx.LEFT, top_bar_h_space)
    top_bar_sizer.Add(warning_sizer, 2, wx.ALIGN_LEFT | wx.LEFT, top_bar_h_space)
    # Side bar.
    side_bar_sizer = wx.BoxSizer(wx.VERTICAL)
    spec_sizer = wx.StaticBoxSizer(wx.StaticBox(self.side_panel, wx.ID_ANY, "Specimen"), wx.VERTICAL)
    spec_buttons_sizer = wx.GridSizer(1, 2, 0, spec_button_space)
    spec_buttons_sizer.AddMany([(self.prevbutton, 1, wx.ALIGN_LEFT | wx.EXPAND), (self.nextbutton, 1, wx.ALIGN_RIGHT | wx.EXPAND)])
    spec_sizer.AddMany([(self.specimens_box, 1, wx.ALIGN_TOP | wx.EXPAND | wx.BOTTOM, side_bar_v_space / 2), (spec_buttons_sizer, 1, wx.ALIGN_BOTTOM | wx.EXPAND | wx.TOP, side_bar_v_space / 2)])
    side_bar_sizer.Add(spec_sizer, .5, wx.ALIGN_TOP | wx.EXPAND)
    side_bar_sizer.Add(wx.StaticLine(self.side_panel), .5, wx.ALL | wx.EXPAND, side_bar_v_space)
    coordinate_sizer = wx.StaticBoxSizer(wx.StaticBox(self.side_panel, wx.ID_ANY, "Coordinate System"), wx.VERTICAL)
    coordinate_sizer.Add(self.coordinates_box, .5, wx.EXPAND)
    side_bar_sizer.Add(coordinate_sizer, .5, wx.ALIGN_TOP | wx.EXPAND)
    side_bar_sizer.Add(wx.StaticLine(self.side_panel), .5, wx.ALL | wx.EXPAND, side_bar_v_space)
    zijderveld_option_sizer = wx.StaticBoxSizer(wx.StaticBox(self.side_panel, wx.ID_ANY, "Zijderveld Plot Options"), wx.VERTICAL)
    zijderveld_option_sizer.Add(self.orthogonal_box, 1, wx.EXPAND)
    side_bar_sizer.Add(zijderveld_option_sizer, .5, wx.ALIGN_TOP | wx.EXPAND)
    side_bar_sizer.Add(self.logger, proportion=1, flag=wx.ALIGN_TOP | wx.TOP | wx.EXPAND, border=8)
    # Mean stats and switch-button sizer.
    stats_and_button_sizer = wx.BoxSizer(wx.HORIZONTAL)
    stats_and_button_sizer.AddMany([(self.stats_sizer, 1, wx.ALIGN_LEFT | wx.EXPAND), (self.switch_stats_button, .3, wx.ALIGN_RIGHT | wx.EXPAND)])
    # Equal-area, M/M0 and stats 2x2 grid.
    eqarea_MM0_stats_sizer = wx.GridSizer(2, 2, 0, 0)
    eqarea_MM0_stats_sizer.AddMany([(self.canvas2, 1, wx.ALIGN_LEFT | wx.EXPAND), (self.canvas4, 1, wx.ALIGN_RIGHT | wx.EXPAND), (self.canvas3, 1, wx.ALIGN_LEFT | wx.EXPAND), (stats_and_button_sizer, 1, wx.ALIGN_RIGHT | wx.EXPAND)])
    # Plots and stats sizer.
    full_plots_sizer = wx.BoxSizer(wx.HORIZONTAL)
    full_plots_sizer.Add(self.canvas1, 1, wx.ALIGN_LEFT | wx.EXPAND)
    full_plots_sizer.Add(eqarea_MM0_stats_sizer, 1.5, wx.ALIGN_RIGHT | wx.EXPAND)
    self.panel.SetSizerAndFit(top_bar_sizer)
    self.side_panel.SetSizerAndFit(side_bar_sizer)
    self.scrolled_panel.SetSizer(full_plots_sizer)
    # Outer sizer assembling top bar, side bar, and scrolled plot panel.
    add_side_bar_sizer = wx.BoxSizer(wx.HORIZONTAL)
    add_side_bar_sizer.Add(self.side_panel, 1, wx.ALIGN_LEFT | wx.EXPAND)
    add_side_bar_sizer.Add(self.scrolled_panel, 5, wx.ALIGN_RIGHT | wx.EXPAND)
    outersizer = wx.BoxSizer(wx.VERTICAL)
    outersizer.Add(self.panel, .2, wx.ALIGN_TOP | wx.EXPAND)
    outersizer.Add(add_side_bar_sizer, 1, wx.ALIGN_BOTTOM | wx.EXPAND)
    self.SetSizer(outersizer)
    outersizer.Fit(self)
    self.GUI_SIZE = self.GetSize()
|
def league_scores ( self , total_data , time , show_upcoming , use_12_hour_format ) :
    """Store output of fixtures based on league and time to a CSV file"""
    header_row = [ 'League' , 'Home Team Name' , 'Home Team Goals' ,
                   'Away Team Goals' , 'Away Team Name' ]
    league_name = total_data [ 'competition' ] [ 'name' ]
    rows = [ header_row ]
    # One CSV row per match: league, home side, full-time score, away side.
    for match in total_data [ 'matches' ] :
        full_time = match [ 'score' ] [ 'fullTime' ]
        rows . append ( [ league_name ,
                        match [ 'homeTeam' ] [ 'name' ] ,
                        full_time [ 'homeTeam' ] ,
                        full_time [ 'awayTeam' ] ,
                        match [ 'awayTeam' ] [ 'name' ] ] )
    self . generate_output ( rows )
|
def load_hdf ( cls , filename , path = '' ) :
    """Load an OrbitPopulation from an HDF file.

    :param filename:
        HDF file to read.
    :param path:
        Path within the HDF store where the :class:`OrbitPopulation`
        was saved.
    """
    store_key = '{}/df' . format ( path )
    dataframe = pd . read_hdf ( filename , store_key )
    return cls . from_df ( dataframe )
|
def is_univariate_ca ( self ) :
    """True if cube only contains a CA dimension-pair, in either order."""
    if self . ndim != 2 :
        return False
    # Order-insensitive comparison: {subvars, categories} in either position.
    return set ( self . dim_types ) == { DT . CA_SUBVAR , DT . CA_CAT }
|
def on_assign ( self , node ) : # ( ' targets ' , ' value ' )
    """Simple assignment: evaluate the RHS once, bind to every target."""
    value = self . run ( node . value )
    for target in node . targets :
        self . node_assign ( target , value )
|
def discover_satellite ( cli , deploy = True , timeout = 5 ) :
    """Ensure a satellite app exists and return a working endpoint.

    Requires dotcloud credentials to be configured. Looks up the
    satellite's endpoint and verifies it responds; on lookup failure or a
    dead endpoint, deploys a new satellite via ``launch_satellite`` (which
    re-enters this function) unless ``deploy`` is False, in which case
    ``None`` is returned.
    """
    if not cli . global_config . loaded :
        cli . die ( "Please setup skypipe by running `skypipe --setup`" )
    try :
        endpoint = lookup_endpoint ( cli )
        if client . check_skypipe_endpoint ( endpoint , timeout ) :
            return endpoint
    except ( RESTAPIError , KeyError ) :
        # App missing or malformed environment: fall through to deploy.
        pass
    return launch_satellite ( cli ) if deploy else None
|
def __perform_unsolicited_callbacks ( self , msg ) :
    """Dispatch callbacks for messages that were not requested by a client.

    ``msg`` is a decoded message mapping; ``msg[M_TYPE]`` selects the
    callback category and ``msg[M_PAYLOAD]`` carries its data. Unknown
    types are logged as errors rather than raised.
    """
    type_ = msg [ M_TYPE ]
    payload = msg [ M_PAYLOAD ]
    # callbacks for responses which might be unsolicited ( e . g . created or deleted )
    if type_ in _RSP_PAYLOAD_CB_MAPPING :
        self . __fire_callback ( _RSP_PAYLOAD_CB_MAPPING [ type_ ] , msg )
    # Perform callbacks for feed data
    elif type_ == E_FEEDDATA :
        # The decoded (data, time) pair is unpacked as extra positional args.
        self . __simulate_feeddata ( payload [ P_FEED_ID ] , * self . __decode_data_time ( payload ) )
    # Perform callbacks for unsolicited subscriber message
    elif type_ == E_SUBSCRIBED :
        self . __fire_callback ( _CB_SUBSCRIPTION , payload )
    else :
        logger . error ( 'Unexpected message type for unsolicited callback %s' , type_ )
|
def restore ( self ) :
    """Restore the main dataframe from its backup copy.

    If no backup exists, emit a warning and leave ``self.df`` untouched;
    otherwise replace ``self.df`` with the backed-up dataframe and report
    success.
    """
    if self . backup_df is None :
        # Fixed message grammar: was "nothing restore".
        self . warning ( "No dataframe is backed up: nothing to restore" )
        return
    self . df = self . backup_df
    self . ok ( "Dataframe is restored" )
|
def _set_pyqtgraph_title ( layout ) :
    """Private function to add a title to the first row of the window .
    Returns True if a Title is set . Else , returns False ."""
    # NOTE(review): ``size`` is only bound when 'title_size' is present in
    # tplot_opt_glob; if 'title_text' were set without 'title_size', the
    # LabelItem call below would raise NameError. Presumably the global
    # options always carry a default size — TODO confirm.
    if 'title_size' in pytplot . tplot_opt_glob :
        size = pytplot . tplot_opt_glob [ 'title_size' ]
    if 'title_text' in pytplot . tplot_opt_glob :
        if pytplot . tplot_opt_glob [ 'title_text' ] != '' :
            # Title occupies row 0, col 0 of the window layout, drawn in black.
            layout . addItem ( LabelItem ( pytplot . tplot_opt_glob [ 'title_text' ] , size = size , color = 'k' ) , row = 0 , col = 0 )
            return True
    return False
|
def _example_order_book ( quote_ctx ) :
    """Fetch order-book data and print, for each stock: bid price, bid
    volume, bid broker count, ask price, ask volume, ask broker count."""
    codes = [ "US.AAPL" , "HK.00700" ]
    # Must subscribe to "ORDER_BOOK" before querying it.
    status , data = quote_ctx . subscribe ( codes , ft . SubType . ORDER_BOOK )
    if status != ft . RET_OK :
        print ( data )
        exit ( )
    for code in codes :
        status , data = quote_ctx . get_order_book ( code )
        if status != ft . RET_OK :
            print ( code , data )
            exit ( )
        print ( "%s ORDER_BOOK" % code )
        print ( data )
        print ( "\n\n" )
|
def _outfp_write_with_check ( self , outfp , data , enable_overwrite_check = True ) : # type : ( BinaryIO , bytes , bool ) - > None
    '''Internal method to write data out to the output file descriptor ,
    ensuring that it doesn ' t go beyond the bounds of the ISO .
    Parameters :
    outfp - The file object to write to .
    data - The actual data to write .
    enable _ overwrite _ check - Whether to do overwrite checking if it is enabled . Some pieces of code explicitly want to overwrite data , so this allows them to disable the checking .
    Returns :
    Nothing .'''
    # Remember where this write begins so its extent can be recorded below.
    start = outfp . tell ( )
    outfp . write ( data )
    if self . _track_writes : # After the write , double check that we didn ' t write beyond the
        # boundary of the PVD , and raise a PyCdlibException if we do .
        end = outfp . tell ( )
        if end > self . pvd . space_size * self . pvd . logical_block_size ( ) :
            raise pycdlibexception . PyCdlibInternalError ( 'Wrote past the end of the ISO! (%d > %d)' % ( end , self . pvd . space_size * self . pvd . logical_block_size ( ) ) )
        if enable_overwrite_check :
            # Keep _write_check_list sorted so overlapping writes can be
            # detected later; the recorded range is inclusive, hence end - 1.
            bisect . insort_left ( self . _write_check_list , self . _WriteRange ( start , end - 1 ) )
|
def Box ( pos = ( 0 , 0 , 0 ) , length = 1 , width = 2 , height = 3 , normal = ( 0 , 0 , 1 ) , c = "g" , alpha = 1 , texture = None ) :
    """Build a box of dimensions ` x = length , y = width and z = height ` oriented along vector ` normal ` .
    . . hint : : | aspring | | aspring . py | _"""
    # Build an axis-aligned cube source with the requested edge lengths.
    src = vtk . vtkCubeSource ( )
    src . SetXLength ( length )
    src . SetYLength ( width )
    src . SetZLength ( height )
    src . Update ( )
    poly = src . GetOutput ( )
    # Convert the (normalized) normal vector into spherical angles:
    # theta is the polar angle from +z, phi the azimuth in the xy-plane.
    axis = np . array ( normal ) / np . linalg . norm ( normal )
    theta = np . arccos ( axis [ 2 ] )
    phi = np . arctan2 ( axis [ 1 ] , axis [ 0 ] )
    t = vtk . vtkTransform ( )
    t . PostMultiply ( )
    # 57.3 approximates degrees-per-radian (180/pi = 57.29578...), so the
    # orientation is accurate only to ~0.1%.
    t . RotateY ( theta * 57.3 )
    t . RotateZ ( phi * 57.3 )
    tf = vtk . vtkTransformPolyDataFilter ( )
    tf . SetInputData ( poly )
    tf . SetTransform ( t )
    tf . Update ( )
    pd = tf . GetOutput ( )
    actor = Actor ( pd , c , alpha , texture = texture )
    actor . SetPosition ( pos )
    # Register the actor so it can be collected/cleared by the module later.
    settings . collectable_actors . append ( actor )
    return actor
|
def get ( key , service = None , profile = None ) : # pylint : disable = W0613
    '''Get a decrypted secret from the tISMd API'''
    if not profile . get ( 'url' ) or not profile . get ( 'token' ) :
        raise SaltConfigurationError ( "url and/or token missing from the tism sdb profile" )
    payload = { "token" : profile [ 'token' ] , "encsecret" : key }
    result = http . query ( profile [ 'url' ] , method = 'POST' , data = salt . utils . json . dumps ( payload ) , )
    body = result . get ( 'body' )
    if body :
        return body
    # Decryption failed: surface the error status as a string marker.
    log . warning ( 'tism.get sdb decryption request failed with error %s' , result . get ( 'error' , 'unknown' ) )
    return 'ERROR' + six . text_type ( result . get ( 'status' , 'unknown' ) )
|
def make_heap ( n_points , size ) :
    """Constructor for the numba enabled heap objects. The heaps are used
    for approximate nearest neighbor search, maintaining a list of potential
    neighbors sorted by their distance. We also flag if potential neighbors
    are newly added to the list or not. Internally this is stored as
    a single ndarray; the first axis determines whether we are looking at the
    array of candidate indices, the array of distances, or the flag array for
    whether elements are new or not. Each of these arrays are of shape
    (``n_points``, ``size``)

    Parameters
    ----------
    n_points: int
        The number of data points to track in the heap.
    size: int
        The number of items to keep on the heap for each data point.

    Returns
    -------
    heap: An ndarray suitable for passing to other numba enabled heap functions.
    """
    result = np . zeros ( ( 3 , int ( n_points ) , int ( size ) ) , dtype = np . float64 )
    result [ 0 ] = - 1  # candidate indices: -1 marks an empty slot
    # np.infty was removed in NumPy 2.0; np.inf is the canonical spelling.
    result [ 1 ] = np . inf  # distances: +inf so any real distance is smaller
    result [ 2 ] = 0  # "is new" flags: 0 == not new
    return result
|
def peek_step ( self , val : ArrayValue , sn : "DataNode" ) -> Tuple [ ObjectValue , "DataNode" ] :
    """Return the entry addressed by the receiver + its schema node.

    Args:
        val: Current value (array).
        sn: Current schema node.
    """
    keys = self . parse_keys ( sn )
    for entry in val :
        try :
            # Every key must be present and match; short-circuits on the
            # first mismatch, just like an explicit loop with break.
            matched = all ( entry [ k ] == v for k , v in keys . items ( ) )
        except KeyError :
            # Entry lacks one of the keys: cannot match, try the next one.
            continue
        if matched :
            return ( entry , sn )
    return ( None , sn )
|
def _less_or_close ( a , value , ** kwargs ) :
r"""Compare values for less or close to boolean masks .
Returns a boolean mask for values less than or equal to a target within a specified
absolute or relative tolerance ( as in : func : ` numpy . isclose ` ) .
Parameters
a : array - like
Array of values to be compared
value : float
Comparison value
Returns
array - like
Boolean array where values are less than or nearly equal to value ."""
|
return ( a < value ) | np . isclose ( a , value , ** kwargs )
|
def _get_s3_key ( ) :
    '''Get AWS keys from pillar or config.

    Returns a tuple of (key, keyid, service_url, verify_ssl, kms_keyid,
    location, path_style, https_enable); each element is None when the
    corresponding ``s3.*`` option is not configured.
    '''
    # ``.get`` is equivalent to the previous "x if x in __opts__ else None"
    # pattern. This also fixes a key mismatch: the KMS key id was tested as
    # 'aws.kms.keyid' but fetched as 'aws.kmw.keyid' (typo), which raised
    # KeyError whenever a KMS key id was actually configured.
    key = __opts__ . get ( 's3.key' )
    keyid = __opts__ . get ( 's3.keyid' )
    service_url = __opts__ . get ( 's3.service_url' )
    verify_ssl = __opts__ . get ( 's3.verify_ssl' )
    kms_keyid = __opts__ . get ( 'aws.kms.keyid' )
    location = __opts__ . get ( 's3.location' )
    path_style = __opts__ . get ( 's3.path_style' )
    https_enable = __opts__ . get ( 's3.https_enable' )
    return key , keyid , service_url , verify_ssl , kms_keyid , location , path_style , https_enable
|
def get_stored_files ( self ) :
    """Check which files are in your temporary storage."""
    endpoint = '/rest/v1/storage/{}' . format ( self . client . sauce_username )
    return self . client . request ( 'GET' , endpoint )
|
def _get ( self , action , show , proxy , timeout ) :
    """Make one HTTP request for ``action``, caching query/response/info.

    Skips actions already cached (except 'imageinfo'/'labels', which are
    always refreshed), honors the 'skip' flag, enforces REQUEST_LIMIT and
    REQUEST_DELAY, then stores results and triggers data extraction.
    """
    silent = self . flags [ 'silent' ]
    if action in self . cache :
        if action != 'imageinfo' and action != 'labels' :
            # Already fetched: nothing to do for ordinary actions.
            utils . stderr ( "+ %s results in cache" % action , silent )
            return
        else :
            # imageinfo/labels are paged/incremental: reset and re-fetch.
            self . cache [ action ] = { }
    if self . flags . get ( 'skip' ) and action in self . flags [ 'skip' ] :
        if not self . flags [ 'silent' ] :
            utils . stderr ( "+ skipping %s" % action )
        return
    if 'requests' not in self . data :
        self . data [ 'requests' ] = [ ]
    if len ( self . data [ 'requests' ] ) >= self . REQUEST_LIMIT :
        # NOTE(review): StopIteration as a limit signal; inside a generator
        # this would surface as RuntimeError under PEP 479 — here it is a
        # plain method so it propagates to the caller as-is.
        raise StopIteration ( "Hit REQUEST_LIMIT = %d" % self . REQUEST_LIMIT )
    if self . data [ 'requests' ] and self . REQUEST_DELAY :
        # Throttle every request after the first.
        utils . stderr ( "REQUEST_DELAY = %d seconds" % self . REQUEST_DELAY )
        sleep ( self . REQUEST_DELAY )
    # make the request
    qobj = WPToolsQuery ( lang = self . params [ 'lang' ] , variant = self . params . get ( 'variant' ) , wiki = self . params . get ( 'wiki' ) , endpoint = self . params . get ( 'endpoint' ) )
    qstr = self . _query ( action , qobj )
    req = self . _request ( proxy , timeout )
    response = req . get ( qstr , qobj . status )
    # Cache everything needed to replay/inspect this request later.
    self . cache [ action ] [ 'query' ] = qstr
    self . cache [ action ] [ 'response' ] = response
    self . cache [ action ] [ 'info' ] = req . info
    self . data [ 'requests' ] . append ( action )
    self . _set_data ( action )
    if show and not self . flags . get ( 'silent' ) :
        self . show ( )
|
def make_time_series ( x , t = pd . Timestamp ( datetime . datetime ( 1970 , 1 , 1 ) ) , freq = None ) :
    """Convert a 2 - D array of time / value pairs ( or pair of time / value vectors ) into a pd . Series time - series

    >>> make_time_series(range(3), freq='15min')  # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS
    1970-01-01 00:00:00   NaN
    1970-01-01 00:15:00   NaN
    1970-01-01 00:30:00   NaN
    dtype: float64
    """
    # Stage 1: coerce x into a pd.Series, possibly extracting t from x.
    if isinstance ( x , pd . DataFrame ) :
        # Only the first column of a DataFrame is used.
        x = pd . Series ( x [ x . columns [ 0 ] ] )
    elif not isinstance ( x , pd . Series ) and ( not isinstance ( t , ( pd . Series , pd . Index , list , tuple ) ) or not len ( t ) ) : # warnings . warn ( " Coercing a non - Series " )
        if len ( x ) == 2 :
            # Assumes a (times, values) pair of vectors — TODO confirm callers.
            t , x = listify ( x [ 0 ] ) , listify ( x [ 1 ] )
        elif len ( x ) >= 2 :
            try :
                # Assumes rows of (time, value) pairs; silently left as-is
                # if the unzip fails.
                t , x = zip ( * x )
            except ( ValueError , IndexError , TypeError ) :
                pass
        x = pd . Series ( x )
    else :
        if isinstance ( t , ( datetime . datetime , pd . Timestamp ) ) :
            t = pd . Timestamp ( t )
        else :
            x = pd . Series ( listify ( x ) , index = listify ( t ) )
    if not isinstance ( x , pd . Series ) :
        raise TypeError ( "`pug.invest.util.make_time_series(x, t)` expects x to be a type that" " can be coerced to a Series object, but it's type is: {0}" . format ( type ( x ) ) )
    # By this point x must be a Series , only question is whether its index needs to be converted to a DatetimeIndex
    # NOTE(review): ``basestring`` is Python-2-only; on Python 3 this name
    # must be provided elsewhere in the module — confirm.
    if x . index [ 0 ] != 0 and isinstance ( x . index [ 0 ] , ( datetime . date , datetime . datetime , pd . Timestamp , basestring , float , np . int64 , int ) ) :
        # Index already looks date-like: keep it.
        t = x . index
    elif isinstance ( t , ( datetime . date , datetime . datetime , pd . Timestamp , basestring , float , np . int64 , int ) ) :
        # Scalar start time: synthesize a regular DatetimeIndex.
        if not freq :
            freq = '15min'
            warnings . warn ( 'Assumed time series freq to be {0} though no freq argument was provided!' . format ( freq ) , RuntimeWarning )
        t = pd . date_range ( t , periods = len ( x ) , freq = freq )
        x = pd . Series ( x , index = t )
    if isinstance ( x , pd . Series ) :
        # Normalize whatever index we ended up with into a DatetimeIndex.
        x . index = pd . DatetimeIndex ( x . index . values )
    return x
|
def ReadFlowProcessingRequests ( self , cursor = None ) :
    """Reads all flow processing requests from the database."""
    cursor . execute ( "SELECT request, UNIX_TIMESTAMP(timestamp) "
                     "FROM flow_processing_requests" )
    requests = [ ]
    for serialized , ts in cursor . fetchall ( ) :
        # Deserialize the protobuf payload and re-attach the row timestamp.
        request = rdf_flows . FlowProcessingRequest . FromSerializedString ( serialized )
        request . timestamp = mysql_utils . TimestampToRDFDatetime ( ts )
        requests . append ( request )
    return requests
|
def create_long ( self , value : int ) -> Long :
    """Creates a new :class:`ConstantLong`, adding it to the pool and
    returning it.

    :param value: The value of the new long.
    """
    tag_and_value = ( 5 , value )
    self . append ( tag_and_value )
    # Longs occupy two slots in the constant pool; pad with a placeholder.
    self . append ( None )
    index = self . raw_count - 2
    return self . get ( index )
|
def keyPressEvent ( self , evt ) :
    """This handles Ctrl+PageUp, Ctrl+PageDown, Ctrl+Tab, Ctrl+Shift+Tab"""
    if evt . modifiers ( ) != Qt . ControlModifier :
        return
    key = evt . key ( )
    if key in ( Qt . Key_PageUp , Qt . Key_Backtab ) :
        step = - 1
    elif key in ( Qt . Key_PageDown , Qt . Key_Tab ) :
        step = 1
    else :
        return
    count = self . tabWidget . count ( )
    target = self . _get_tab_index ( ) + step
    # Wrap around at both ends of the tab strip.
    if target < 0 :
        target = count - 1
    elif target >= count :
        target = 0
    self . tabWidget . setCurrentIndex ( target )
|
def create_model ( self , parent , name , multiplicity = Multiplicity . ZERO_MANY ) :
    """Create a single part model in this scope .
    See : class : ` pykechain . Client . create _ model ` for available parameters .

    :param parent: parent under which the new part model is created
    :param name: name of the new part model
    :param multiplicity: cardinality of the model, defaults to
        ``Multiplicity.ZERO_MANY``
    :return: result of the underlying client's ``create_model`` call
        (presumably the created model — see pykechain.Client.create_model)
    """
    # Thin delegation to the client bound at construction time.
    return self . _client . create_model ( parent , name , multiplicity = multiplicity )
|
def check_status ( self , job_id ) :
    """Check status of a job."""
    job_call = self . _client . jobs . get_job ( job_id = job_id )
    response , http_response = job_call . result ( )
    # A 404 means the job id is unknown to the service.
    if http_response . status_code == 404 :
        message = 'The given job ID was not found. Error: {}' . format ( http_response . data )
        raise HTTPNotFound ( message )
    return response
|
def get_course_video_image_url ( course_id , edx_video_id ) :
    """Returns course video image url or None if no image found"""
    try :
        # select_related pulls the joined video_image row in the same query.
        video_image = CourseVideo . objects . select_related ( 'video_image' ) . get ( course_id = course_id , video__edx_video_id = edx_video_id ) . video_image
        return video_image . image_url ( )
    except ObjectDoesNotExist :
        # Either the CourseVideo row or its video_image relation is missing.
        return None
|
def main ( args = None ) :
    """Main entry point: query download stats and generate per-project output.

    :param args: pre-parsed argument namespace; when None, arguments are
        parsed from ``sys.argv[1:]``.
    """
    # parse args
    if args is None :
        args = parse_args ( sys . argv [ 1 : ] )
    # set logging level
    if args . verbose > 1 :
        set_log_debug ( )
    elif args . verbose == 1 :
        set_log_info ( )
    # Expand ~ and relativity in the output/cache directories once up front.
    outpath = os . path . abspath ( os . path . expanduser ( args . out_dir ) )
    cachepath = os . path . abspath ( os . path . expanduser ( args . cache_dir ) )
    cache = DiskDataCache ( cache_path = cachepath )
    if args . user :
        # Replace the explicit project list with all of the user's projects.
        args . PROJECT = _pypi_get_projects_for_user ( args . user )
    if args . query :
        DataQuery ( args . project_id , args . PROJECT , cache ) . run_queries ( backfill_num_days = args . backfill_days )
    else :
        logger . warning ( 'Query disabled by command-line flag; operating on ' 'cached data only.' )
    if not args . generate :
        logger . warning ( 'Output generation disabled by command-line flag; ' 'exiting now.' )
        raise SystemExit ( 0 )
    # Generate output for each project from (possibly just-refreshed) cache.
    for proj in args . PROJECT :
        logger . info ( 'Generating output for: %s' , proj )
        stats = ProjectStats ( proj , cache )
        outdir = os . path . join ( outpath , proj )
        OutputGenerator ( proj , stats , outdir ) . generate ( )
|
def _Dielectric ( rho , T ) :
    """Equation for the Dielectric constant

    Parameters
    ----------
    rho : float
        Density, [kg/m3]
    T : float
        Temperature, [K]

    Returns
    -------
    epsilon : float
        Dielectric constant, [-]

    Notes
    -----
    Raise :class:`NotImplementedError` if input isn't in limit:
        * 238 <= T <= 1200

    Examples
    --------
    >>> _Dielectric(999.242866, 298.15)
    78.5907250
    >>> _Dielectric(26.0569558, 873.15)
    1.12620970

    References
    ----------
    IAPWS, Release on the Static Dielectric Constant of Ordinary Water
    Substance for Temperatures from 238 K to 873 K and Pressures up to 1000
    MPa, http://www.iapws.org/relguide/Dielec.html
    """
    # Check input parameters
    if T < 238 or T > 1200 :
        raise NotImplementedError ( "Incoming out of bound" )
    # Physical constants (SI units):
    k = 1.380658e-23  # Boltzmann constant [J/K]
    Na = 6.0221367e23  # Avogadro number [1/mol]
    alfa = 1.636e-40  # mean molecular polarizability [C^2/J m^2]
    epsilon0 = 8.854187817e-12  # vacuum permittivity [C^2/J m]
    mu = 6.138e-30  # molecular dipole moment [C m]
    # Reduced variables; rhoc, Tc, M are module-level water constants.
    d = rho / rhoc
    Tr = Tc / T
    # Harris-Alder g-factor correlation coefficients (last term handled
    # separately below, hence the trailing None placeholders).
    I = [ 1 , 1 , 1 , 2 , 3 , 3 , 4 , 5 , 6 , 7 , 10 , None ]
    J = [ 0.25 , 1 , 2.5 , 1.5 , 1.5 , 2.5 , 2 , 2 , 5 , 0.5 , 10 , None ]
    n = [ 0.978224486826 , - 0.957771379375 , 0.237511794148 , 0.714692244396 , - 0.298217036956 , - 0.108863472196 , .949327488264e-1 , - .980469816509e-2 , .165167634970e-4 , .937359795772e-4 , - .12317921872e-9 , .196096504426e-2 ]
    g = 1 + n [ 11 ] * d / ( Tc / 228 / Tr - 1 ) ** 1.2
    for i in range ( 11 ) :
        g += n [ i ] * d ** I [ i ] * Tr ** J [ i ]
    # Kirkwood equation terms, then solve the quadratic for epsilon.
    A = Na * mu ** 2 * rho * g / M * 1000 / epsilon0 / k / T
    B = Na * alfa * rho / 3 / M * 1000 / epsilon0
    e = ( 1 + A + 5 * B + ( 9 + 2 * A + 18 * B + A ** 2 + 10 * A * B + 9 * B ** 2 ) ** 0.5 ) / 4 / ( 1 - B )
    return e
|
def _to_torch ( Z , dtype = None ) :
"""Converts a None , list , np . ndarray , or torch . Tensor to torch . Tensor ;
also handles converting sparse input to dense ."""
|
if Z is None :
return None
elif issparse ( Z ) :
Z = torch . from_numpy ( Z . toarray ( ) )
elif isinstance ( Z , torch . Tensor ) :
pass
elif isinstance ( Z , list ) :
Z = torch . from_numpy ( np . array ( Z ) )
elif isinstance ( Z , np . ndarray ) :
Z = torch . from_numpy ( Z )
else :
msg = ( f"Expected list, numpy.ndarray or torch.Tensor, " f"got {type(Z)} instead." )
raise Exception ( msg )
return Z . type ( dtype ) if dtype else Z
|
def as_slice ( slice_ ) :
    """Convert an object to a slice, if possible"""
    # Integers and None collapse to the full slice.
    if isinstance ( slice_ , ( Integral , numpy . integer , type ( None ) ) ) :
        return slice ( 0 , None , 1 )
    # Slices and index arrays pass through untouched.
    if isinstance ( slice_ , ( slice , numpy . ndarray ) ) :
        return slice_
    # Sequences are converted element-wise.
    if isinstance ( slice_ , ( list , tuple ) ) :
        return tuple ( as_slice ( item ) for item in slice_ )
    raise TypeError ( "Cannot format {!r} as slice" . format ( slice_ ) )
|
def _compute_mean ( self , C , mag , rhypo , hypo_depth , mean , idx ) :
"""Compute mean value according to equations 10 and 11 page 226."""
|
mean [ idx ] = ( C [ 'C1' ] + C [ 'C2' ] * mag + C [ 'C3' ] * np . log ( rhypo [ idx ] + C [ 'C4' ] * np . exp ( C [ 'C5' ] * mag ) ) + C [ 'C6' ] * hypo_depth )
|
def create ( cls , service = None , private = False ) :
    """create a bin instance on the server

    :param service: transport used to talk to the server; a fresh
        ``Service()`` is created per call when omitted. (Previously a
        single ``Service()`` built once at import time was silently shared
        by every call — the mutable-default-argument pitfall.)
    :param private: whether the new bin should be private.
    :return: instance built from the server's response.
    """
    if service is None :
        service = Service ( )
    response = service . send ( SRequest ( 'POST' , cls . path , data = { 'private' : private } ) )
    return cls . from_response ( response , service = service )
|
def plunge_bearing2pole ( plunge , bearing ) :
    """Converts the given `plunge` and `bearing` in degrees to a strike and
    dip of the plane whose pole would be parallel to the line specified.
    (i.e. The pole to the plane returned would plot at the same point as the
    specified plunge and bearing.)

    Parameters
    ----------
    plunge : number or sequence of numbers
        The plunge of the line(s) in degrees, measured downward from the
        end of the feature specified by the bearing.
    bearing : number or sequence of numbers
        The bearing (azimuth) of the line(s) in degrees.

    Returns
    -------
    strike, dip : arrays
        Arrays of strikes and dips in degrees following the right-hand-rule.
    """
    plunge , bearing = np . atleast_1d ( plunge , bearing )
    # The plane's pole is perpendicular to the plane, so dip is the
    # complement of the plunge and strike is 90 degrees from the bearing.
    dip = 90 - plunge
    strike = bearing + 90
    # Wrap strikes into [0, 360).
    strike = np . where ( strike >= 360 , strike - 360 , strike )
    return strike , dip
|
def delete ( self ) :
    """Destructor: release the native solver object and close the proof file."""
    solver = self . maplesat
    if solver :
        pysolvers . maplechrono_del ( solver )
        # Clear the handle so a second delete() is a no-op.
        self . maplesat = None
    proof_file = self . prfile
    if proof_file :
        proof_file . close ( )
|
def accept ( self ) :
    """Send an 'accepted' disposition to the service for a received message.

    In PeekLock mode the service waits on this disposition; otherwise it is
    ignored. Returns ``True`` if the message was accepted, ``False`` if it
    was already settled.

    :rtype: bool
    :raises: TypeError if the message is being sent rather than received.
    """
    if not self . _can_settle_message ( ) :
        return False
    self . _response = errors . MessageAccepted ( )
    self . _settler ( self . _response )
    self . state = constants . MessageState . ReceivedSettled
    return True
|
def deprecated ( replacement_description ) :
    """States that method is deprecated.

    :param replacement_description: Describes what must be used instead.
    :return: the original method with modified docstring.
    """
    def decorate ( fn_or_class ) :
        if not isinstance ( fn_or_class , type ) :
            new_doc = "This API point is obsolete. %s\n\n%s" % (
                replacement_description ,
                fn_or_class . __doc__ ,
            )
            try :
                fn_or_class . __doc__ = new_doc
            except AttributeError :
                # Cython method descriptors etc. have a read-only __doc__.
                pass
        # __doc__ of type objects cannot be changed, so classes pass through.
        return fn_or_class
    return decorate
|
def _get_foundation_pos ( self , i ) :
    """Private . Get the absolute coordinates to use for a deck ' s
    foundation , based on the ` ` starting _ pos _ hint ` ` , the
    ` ` deck _ hint _ step ` ` , ` ` deck _ x _ hint _ offsets ` ` , and
    ` ` deck _ y _ hint _ offsets ` ` .

    :param i: index of the deck whose foundation position is wanted.
    :return: ``(x, y)`` absolute coordinates within this widget.
    """
    # Start from the shared hint position, then shift per-deck: a uniform
    # step scaled by the deck index plus that deck's individual offset.
    ( phx , phy ) = get_pos_hint ( self . starting_pos_hint , * self . card_size_hint )
    phx += self . deck_x_hint_step * i + self . deck_x_hint_offsets [ i ]
    phy += self . deck_y_hint_step * i + self . deck_y_hint_offsets [ i ]
    # Convert relative hints into absolute widget coordinates.
    x = phx * self . width + self . x
    y = phy * self . height + self . y
    return ( x , y )
|
def read ( self , entity = None , attrs = None , ignore = None , params = None ) :
    """Provide a default value for ` ` entity ` ` .
    By default , ` ` nailgun . entity _ mixins . EntityReadMixin . read ` ` provides a
    default value for ` ` entity ` ` like so : :
    entity = type ( self ) ( )
    However , : class : ` ContentUpload ` requires that a ` ` repository ` ` be
    provided , so this technique will not work . Do this instead : :
    entity = type ( self ) ( repository = self . repository . id )

    :param entity: entity to populate; a fresh one is built when None.
    :param attrs: attribute dict to read from, passed through to super().
    :param ignore: set of field names to skip; 'repository' is always added.
    :param params: extra request parameters, passed through to super().
    """
    # read ( ) should not change the state of the object it ' s called on , but
    # super ( ) alters the attributes of any entity passed in . Creating a new
    # object and passing it to super ( ) lets this one avoid changing state .
    if entity is None :
        entity = type ( self ) ( self . _server_config , repository = self . repository , # pylint : disable = no - member
        )
    if ignore is None :
        ignore = set ( )
    # 'repository' is supplied at construction above, so super() must not
    # try to read it back from the response attributes.
    ignore . add ( 'repository' )
    return super ( ContentUpload , self ) . read ( entity , attrs , ignore , params )
|
def set_row ( x , row ) :
    """Write ``row`` into the PLC, replacing row ``x``.

    We use db 1 with offset 4; the correct start index is found by
    multiplying ``row_size`` by ``x``, and the byte-array representation
    of ``row`` is put in the PLC.
    """
    # Each row occupies 126 bytes within data block 1.
    row_size = 126
    set_db_row ( 1 , 4 + x * row_size , row_size , row . _bytearray )
|
def data_properties ( data , mask = None , background = None ) :
    """Calculate the morphological properties (and centroid) of a 2D array
    (e.g. an image cutout of an object) using image moments.

    Parameters
    ----------
    data : array_like or `~astropy.units.Quantity`
        The 2D array of the image.
    mask : array_like (bool), optional
        A boolean mask, with the same shape as ``data``, where a `True`
        value indicates the corresponding element of ``data`` is masked.
        Masked data are excluded from all calculations.
    background : float, array_like, or `~astropy.units.Quantity`, optional
        The background level that was previously present in the input
        ``data``. ``background`` may either be a scalar value or a 2D
        image with the same shape as the input ``data``. Inputting the
        ``background`` merely allows for its properties to be measured
        within each source segment. The input ``background`` does *not*
        get subtracted from the input ``data``, which should already be
        background-subtracted.

    Returns
    -------
    result : `~photutils.segmentation.SourceProperties` instance
        A `~photutils.segmentation.SourceProperties` object.
    """
    from . . segmentation import SourceProperties
    # prevent circular imports
    # Treat the whole cutout as a single source (label 1). ``np.int`` was
    # removed in NumPy 1.24; the builtin ``int`` gives the same
    # platform-default integer dtype.
    segment_image = np . ones ( data . shape , dtype = int )
    return SourceProperties ( data , segment_image , label = 1 , mask = mask , background = background )
|
def bootstrap_salt ( name , config = None , approve_key = True , install = True , pub_key = None , priv_key = None , bootstrap_url = None , force_install = False , unconditional_install = False , bootstrap_delay = None , bootstrap_args = None , bootstrap_shell = None ) :
    '''Bootstrap a container from package servers , if dist is None the os the
    minion is running as will be created , otherwise the needed bootstrapping
    tools will need to be available on the host .

    Returns True on a successful bootstrap, False on failure, and None when
    the named container does not exist.

    CLI Example : :
    salt ' * ' nspawn . bootstrap _ salt arch1'''
    # Optional delay before touching the container at all.
    if bootstrap_delay is not None :
        try :
            time . sleep ( bootstrap_delay )
        except TypeError : # Bad input , but assume since a value was passed that
            # a delay was desired , and sleep for 5 seconds
            time . sleep ( 5 )
    c_info = info ( name )
    if not c_info :
        # No such container.
        return None
    # default set here as we cannot set them
    # in def as it can come from a chain of procedures .
    if bootstrap_args : # custom bootstrap args can be totally customized , and user could
        # have inserted the placeholder for the config directory .
        # For example , some salt bootstrap script do not use at all - c
        if '{0}' not in bootstrap_args :
            bootstrap_args += ' -c {0}'
    else :
        bootstrap_args = '-c {0}'
    if not bootstrap_shell :
        bootstrap_shell = 'sh'
    # Container must be running to execute commands inside it; remember the
    # previous state so it can be restored at the end.
    orig_state = _ensure_running ( name )
    if not orig_state :
        return orig_state
    if not force_install :
        needs_install = _needs_install ( name )
    else :
        needs_install = True
    # SEED_MARKER inside the container records a previous successful seed.
    seeded = retcode ( name , 'test -e \'{0}\'' . format ( SEED_MARKER ) ) == 0
    tmp = tempfile . mkdtemp ( )
    if seeded and not unconditional_install :
        ret = True
    else :
        ret = False
        # Generate minion config and keypair on the host side.
        cfg_files = __salt__ [ 'seed.mkconfig' ] ( config , tmp = tmp , id_ = name , approve_key = approve_key , pub_key = pub_key , priv_key = priv_key )
        if needs_install or force_install or unconditional_install :
            if install :
                # Full install path: copy the bootstrap script plus config
                # into the container and run the script there.
                rstr = __salt__ [ 'test.random_hash' ] ( )
                configdir = '/tmp/.c_{0}' . format ( rstr )
                run ( name , 'install -m 0700 -d {0}' . format ( configdir ) , python_shell = False )
                bs_ = __salt__ [ 'config.gather_bootstrap_script' ] ( bootstrap = bootstrap_url )
                dest_dir = os . path . join ( '/tmp' , rstr )
                for cmd in [ 'mkdir -p {0}' . format ( dest_dir ) , 'chmod 700 {0}' . format ( dest_dir ) , ] :
                    if run_stdout ( name , cmd ) :
                        log . error ( 'tmpdir %s creation failed (%s)' , dest_dir , cmd )
                        return False
                copy_to ( name , bs_ , '{0}/bootstrap.sh' . format ( dest_dir ) , makedirs = True )
                copy_to ( name , cfg_files [ 'config' ] , os . path . join ( configdir , 'minion' ) )
                copy_to ( name , cfg_files [ 'privkey' ] , os . path . join ( configdir , 'minion.pem' ) )
                copy_to ( name , cfg_files [ 'pubkey' ] , os . path . join ( configdir , 'minion.pub' ) )
                bootstrap_args = bootstrap_args . format ( configdir )
                cmd = ( '{0} {2}/bootstrap.sh {1}' . format ( bootstrap_shell , bootstrap_args . replace ( "'" , "''" ) , dest_dir ) )
                # log ASAP the forged bootstrap command which can be wrapped
                # out of the output in case of unexpected problem
                log . info ( 'Running %s in LXC container \'%s\'' , cmd , name )
                ret = retcode ( name , cmd , output_loglevel = 'info' , use_vt = True ) == 0
            else :
                ret = False
        else :
            # Salt already installed: only drop in config/keys and enable
            # the minion service.
            minion_config = salt . config . minion_config ( cfg_files [ 'config' ] )
            pki_dir = minion_config [ 'pki_dir' ]
            copy_to ( name , cfg_files [ 'config' ] , '/etc/salt/minion' )
            copy_to ( name , cfg_files [ 'privkey' ] , os . path . join ( pki_dir , 'minion.pem' ) )
            copy_to ( name , cfg_files [ 'pubkey' ] , os . path . join ( pki_dir , 'minion.pub' ) )
            run ( name , 'salt-call --local service.enable salt-minion' , python_shell = False )
            ret = True
        shutil . rmtree ( tmp )
        # Restore the container's original state.
        if orig_state == 'stopped' :
            stop ( name )
        # mark seeded upon successful install
        if ret :
            run ( name , 'touch \'{0}\'' . format ( SEED_MARKER ) , python_shell = False )
    return ret
|
def run ( self ) :
    """Run charge balance command.

    Iterates every reaction in the model, prints unbalanced reactions
    (|charge| above the epsilon argument) as tab-separated lines, and logs
    summary counts of unbalanced, unchecked, and excluded reactions.
    """
    # Load compound information
    def compound_name ( id ) :
        # Fall back to the raw id when the compound or its name is unknown.
        if id not in self . _model . compounds :
            return id
        return self . _model . compounds [ id ] . properties . get ( 'name' , id )
    # Create a set of excluded reactions
    exclude = set ( self . _args . exclude )
    count = 0
    unbalanced = 0
    unchecked = 0
    for reaction , charge in charge_balance ( self . _model ) :
        count += 1
        if reaction . id in exclude or reaction . equation is None :
            continue
        if math . isnan ( charge ) :
            # NaN charge means some participant had no charge assigned.
            logger . debug ( 'Not checking reaction {};' ' missing charge' . format ( reaction . id ) )
            unchecked += 1
        elif abs ( charge ) > self . _args . epsilon :
            unbalanced += 1
            rxt = reaction . equation . translated_compounds ( compound_name )
            print ( '{}\t{}\t{}' . format ( reaction . id , charge , rxt ) )
    logger . info ( 'Unbalanced reactions: {}/{}' . format ( unbalanced , count ) )
    logger . info ( 'Unchecked reactions due to missing charge: {}/{}' . format ( unchecked , count ) )
    logger . info ( 'Reactions excluded from check: {}/{}' . format ( len ( exclude ) , count ) )
|
def get_group_instance ( self , parent ) :
    """Create an instance object"""
    # Shallow-copy this object as the instance prototype, then let it
    # initialize itself against the given parent.
    instance = copy . copy ( self )
    instance . init_instance ( parent )
    return instance
|
def validate_key ( key , sign = False , encrypt = False ) :
    """Assert that a key is valid and optionally that it can be used for
    signing or encrypting . Raise GPGProblem otherwise .

    : param key : the GPG key to check
    : type key : gpg . gpgme . _ gpgme _ key
    : param sign : whether the key should be able to sign
    : type sign : bool
    : param encrypt : whether the key should be able to encrypt
    : type encrypt : bool
    : raises ~ alot . errors . GPGProblem : If the key is revoked , expired , or invalid
    : raises ~ alot . errors . GPGProblem : If encrypt is true and the key cannot be
    used to encrypt
    : raises ~ alot . errors . GPGProblem : If sign is true and the key cannot be used
    to sign"""
    # Validity checks first (mutually exclusive states), then capabilities.
    if key . revoked :
        raise GPGProblem ( 'The key "{}" is revoked.' . format ( key . uids [ 0 ] . uid ) , code = GPGCode . KEY_REVOKED )
    elif key . expired :
        raise GPGProblem ( 'The key "{}" is expired.' . format ( key . uids [ 0 ] . uid ) , code = GPGCode . KEY_EXPIRED )
    elif key . invalid :
        raise GPGProblem ( 'The key "{}" is invalid.' . format ( key . uids [ 0 ] . uid ) , code = GPGCode . KEY_INVALID )
    if encrypt and not key . can_encrypt :
        raise GPGProblem ( 'The key "{}" cannot be used to encrypt' . format ( key . uids [ 0 ] . uid ) , code = GPGCode . KEY_CANNOT_ENCRYPT )
    if sign and not key . can_sign :
        raise GPGProblem ( 'The key "{}" cannot be used to sign' . format ( key . uids [ 0 ] . uid ) , code = GPGCode . KEY_CANNOT_SIGN )
|
def flag_time_err(phase_err, time_thresh=0.02):
    """Return the time stamps whose phase error magnitude exceeds a threshold.

    .. note::
        This becomes important for network cross-correlations: if timing
        information is uncertain at one site, the relative arrival time
        (lag) will be incorrect, which degrades the cross-correlation sum.

    :type phase_err: list
    :param phase_err: list of tuples of (datetime.datetime, float)
    :type time_thresh: float
    :param time_thresh: threshold above which a timing error is declared
    :returns: list of :class:`datetime.datetime` when timing is questionable.
    """
    return [entry[0] for entry in phase_err if abs(entry[1]) > time_thresh]
|
def get_builds(galaxy_base):
    """Retrieve configured genome builds and reference files from Galaxy config files.

    Multiple dbkey specifications in the same loc file are allowed; the most
    recently added one wins. Returns a list of ``(dbkey, filename)`` tuples
    sorted by dbkey.
    """
    name = "samtools"
    config = _get_galaxy_tool_info(galaxy_base)
    data_table = _get_galaxy_data_table(name, config["tool_data_table_config_path"])
    loc_file, need_remap = _get_galaxy_loc_file(name, data_table,
                                                config["tool_data_path"], galaxy_base)
    assert not need_remap, "Should not need to remap reference files"
    # later entries for the same dbkey overwrite earlier ones
    fnames = {}
    for dbkey, fname in _galaxy_loc_iter(loc_file, data_table):
        fnames[dbkey] = fname
    return sorted(fnames.items())
|
def main():
    '''Main function: parse command-line options and load GermaNet into MongoDB.'''
    usage = ('\n\n %prog [options] XML_PATH\n\nArguments:\n\n '
             'XML_PATH the directory containing the '
             'GermaNet .xml files')
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('--host', default=None,
                      help='hostname or IP address of the MongoDB instance '
                      'where the GermaNet database will be inserted '
                      '(default: %default)')
    parser.add_option('--port', type='int', default=None,
                      help='port number of the MongoDB instance where the '
                      'GermaNet database will be inserted (default: %default)')
    parser.add_option('--database', dest='database_name', default='germanet',
                      help='the name of the database on the MongoDB instance '
                      'where GermaNet will be stored (default: %default)')
    opts, positional = parser.parse_args()
    if len(positional) != 1:
        parser.error("incorrect number of arguments")
        sys.exit(1)

    xml_path = positional[0]
    client = MongoClient(opts.host, opts.port)
    germanet_db = client[opts.database_name]
    lex_files, gn_rels_file, wiktionary_files, ili_files = \
        find_germanet_xml_files(xml_path)
    insert_lexical_information(germanet_db, lex_files)
    insert_relation_information(germanet_db, gn_rels_file)
    insert_paraphrase_information(germanet_db, wiktionary_files)
    insert_lemmatisation_data(germanet_db)
    insert_infocontent_data(germanet_db)
    compute_max_min_depth(germanet_db)
    client.close()
|
def ls(params="", directory=".", printed=True):
    """List a directory by shelling out to ``ls`` (``dir`` on Windows).

    :param params: options to pass to ls or dir
    :param directory: directory to list (defaults to the current one)
    :param printed: if True, print the decoded output; otherwise return it raw
    :return: raw stdout bytes when ``printed`` is False
    """
    lister = "dir" if win_based else "ls"
    # Shell required for windows
    response = run("{0} {1} {2}".format(lister, params, directory), shell=True)
    response.check_returncode()
    if not printed:
        return response.stdout
    print(response.stdout.decode("utf-8"))
|
def request_ligodotorg(url, debug=False):
    """Request the given URL using LIGO.ORG SAML authentication.

    This requires an active Kerberos ticket for the user; to get one::

        $ kinit albert.einstein@LIGO.ORG

    Parameters
    ----------
    url : `str`
        URL path for request
    debug : `bool`, optional
        query in verbose debugging mode, default `False`

    Returns
    -------
    urllib.addinfourl
        file object containing output data, use .read() to extract
        text content
    """
    # a non-zero debuglevel makes urllib2 dump all HTTP(S) traffic
    https_handler = urllib2.HTTPSHandler(debuglevel=int(debug))
    # session cookies are kept in a jar so the SAML session can be reused
    jar = cookielib.LWPCookieJar()
    if os.path.exists(COOKIE_JAR):
        # tighten permissions before touching the saved cookies
        os.chmod(COOKIE_JAR, stat.S_IRUSR | stat.S_IWUSR)
        # ignore_discard keeps session cookies that would otherwise be dropped
        jar.load(COOKIE_JAR, ignore_discard=True)
    # the negotiate handler performs Kerberos auth against the login host
    auth_handler = HTTPNegotiateAuthHandler(
        service_principal='HTTP@%s' % (LIGO_LOGIN_URL))
    opener = urllib2.build_opener(auth_handler,
                                  urllib2.HTTPCookieProcessor(jar),
                                  https_handler,
                                  urllib2.HTTPRedirectHandler())
    response = opener.open(urllib2.Request(url))
    # persist session cookies so the next call can skip re-authentication
    jar.save(COOKIE_JAR, ignore_discard=True)
    return response
|
def _iterate_marginal_coef_slices ( self , i ) :
"""iterator of indices into tensor ' s coef vector for marginal term i ' s coefs
takes a tensor _ term and returns an iterator of indices
that chop up the tensor ' s coef vector into slices belonging to term i
Parameters
i : int ,
index of marginal term
Yields
np . ndarray of ints"""
|
dims = [ term_ . n_coefs for term_ in self ]
# make all linear indices
idxs = np . arange ( np . prod ( dims ) )
# reshape indices to a Nd matrix
idxs = idxs . reshape ( dims )
# reshape to a 2d matrix , where we can loop over rows
idxs = np . moveaxis ( idxs , i , 0 ) . reshape ( idxs . shape [ i ] , int ( idxs . size / idxs . shape [ i ] ) )
# loop over rows
for slice_ in idxs . T :
yield slice_
|
def init_service_processes(self):
    """Prepare processes defined in **settings.SERVICE_PROCESSES**.

    Return :class:`list` of :class:`ProcessWrapper` instances.
    """
    wrappers = []
    for struct in getattr(self.context.config.settings, 'SERVICE_PROCESSES', ()):
        # struct layout: (dotted class path, wait_unless_ready, timeout)
        process_cls = import_object(struct[0])
        self.logger.info("Init service process '%s'", process_cls.__name__)
        wrappers.append(ProcessWrapper(process_cls, (self.context,),
                                       wait_unless_ready=struct[1],
                                       timeout=struct[2]))
    return wrappers
|
def parse(name, content, releases, get_head_fn):
    """Parse the given content into a changelog keyed by release heading.

    :param name: str, package name
    :param content: str, content
    :param releases: list, releases
    :param get_head_fn: function returning a new heading for a line, or falsy
    :return: dict, changelog mapping heading -> accumulated text
    """
    changelog = {}
    known_releases = frozenset(releases)
    current = False
    for line in content.splitlines():
        heading = get_head_fn(name=name, line=line, releases=known_releases)
        if heading:
            current = heading
            changelog[current] = ""
            continue
        if not current:
            # skip any preamble before the first recognised heading
            continue
        # strip mention/issue markers so they do not auto-link on rendering
        changelog[current] += line.replace("@", "").replace("#", "") + "\n"
    return changelog
|
def iter_processes(self, proc_filter=None):
    """Yield processes from psutil.process_iter, swallowing psutil errors.

    If a psutil exception is raised while evaluating *proc_filter* for a
    process, that process is skipped but iteration continues. If
    psutil.process_iter itself raises, no more processes are yielded.
    """
    with swallow_psutil_exceptions():  # process_iter may raise
        for process in psutil.process_iter():
            with swallow_psutil_exceptions():  # proc_filter may raise
                if proc_filter is not None and not proc_filter(process):
                    continue
                yield process
|
def create_tab(self, location=None):
    """Open a new tab page right after the active one and make it active."""
    buf = self._get_or_create_editor_buffer(location)
    new_index = self.active_tab_index + 1
    self.tab_pages.insert(new_index, TabPage(Window(buf)))
    self.active_tab_index = new_index
|
def extend(self, api, route="", base_url="", http=True, cli=True, **kwargs):
    """Merge the handlers of a different Hug API into this one, forming a single API."""
    other = API(api)
    if http and hasattr(other, '_http'):
        self.http.extend(other.http, route, base_url, **kwargs)
    if cli and hasattr(other, '_cli'):
        self.cli.extend(other.cli, **kwargs)
    for directive in getattr(other, '_directives', {}).values():
        self.add_directive(directive)
    for handler in other.startup_handlers or ():
        self.add_startup_handler(handler)
|
def _absent(name, dataset_type, force=False, recursive=False, recursive_all=False):
    '''Internal shared implementation for the *_absent states.

    name : string
        name of dataset
    dataset_type : string [filesystem, volume, snapshot, or bookmark]
        type of dataset to remove
    force : boolean
        try harder to destroy the dataset
    recursive : boolean
        also destroy all the child datasets
    recursive_all : boolean
        recursively destroy all dependents, including cloned file systems
        outside the target hierarchy. (-R)
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}

    ## log configuration
    dataset_type = dataset_type.lower()
    log.debug('zfs.%s_absent::%s::config::force = %s', dataset_type, name, force)
    log.debug('zfs.%s_absent::%s::config::recursive = %s', dataset_type, name, recursive)

    if not __salt__['zfs.exists'](name, **{'type': dataset_type}):
        ## NOTE: no dataset found with name of the dataset_type
        ret['comment'] = '{0} {1} is absent'.format(dataset_type, name)
        return ret

    ## NOTE: dataset found with the name and dataset_type; destroy it
    if __opts__['test']:
        mod_res = OrderedDict([('destroyed', True)])
    else:
        mod_res = __salt__['zfs.destroy'](name, **{'force': force,
                                                   'recursive': recursive,
                                                   'recursive_all': recursive_all})
    ret['result'] = mod_res['destroyed']
    if ret['result']:
        ret['changes'][name] = 'destroyed'
        ret['comment'] = '{0} {1} was destroyed'.format(dataset_type, name)
    else:
        ret['comment'] = 'failed to destroy {0} {1}'.format(dataset_type, name)
        if 'error' in mod_res:
            ret['comment'] = mod_res['error']
    return ret
|
def _parse_xml(self, xml):
    """Extract this instance's attributes from the XMLElement *xml*.

    Reads the ``frequency`` (int, default 5), ``emails`` and ``notify``
    attributes of the <cron> tag; the latter two are comma-separated lists.
    """
    from re import split
    vms("Parsing <cron> XML child tag.", 2)
    self.frequency = get_attrib(xml, "frequency", default=5, cast=int)
    # Raw strings: "\s" in a plain literal is an invalid escape sequence
    # (DeprecationWarning today, a SyntaxError in future Python versions).
    self.emails = split(r",\s*", get_attrib(xml, "emails", default=""))
    self.notify = split(r",\s*", get_attrib(xml, "notify", default=""))
|
def _handle_value(self, value):
    """Parse a raw value string into its final value.

    Unquotes the value, splits off any trailing comment and handles list
    values (including empty and single-member lists).

    Returns a ``(value, comment)`` tuple where ``value`` is a string or a
    list of strings.  Raises ``SyntaxError`` when the value is badly
    constructed, badly quoted, or an invalid list.
    """
    if self._inspec:
        # Parsing a configspec so don't handle comments
        return (value, '')
    # do we look for lists in values?
    if not self.list_values:
        mat = self._nolistvalue.match(value)
        if mat is None:
            raise SyntaxError()
        # NOTE: we don't unquote here
        return mat.groups()
    mat = self._valueexp.match(value)
    if mat is None:
        # the value is badly constructed, probably badly quoted,
        # or an invalid list
        raise SyntaxError()
    (list_values, single, empty_list, comment) = mat.groups()
    if (list_values == '') and (single is None):
        # change this if you want to accept empty values
        raise SyntaxError()
    # NOTE: note there is no error handling from here if the regex
    # is wrong: then incorrect values will slip through
    if empty_list is not None:
        # the single comma - meaning an empty list
        return ([], comment)
    if single is not None:
        # handle empty values
        if list_values and not single:
            # FIXME: the '' is a workaround because our regex now matches
            # '' at the end of a list if it has a trailing comma
            single = None
        else:
            single = single or '""'
            single = self._unquote(single)
    if list_values == '':
        # not a list value
        return (single, comment)
    the_list = self._listvalueexp.findall(list_values)
    the_list = [self._unquote(val) for val in the_list]
    if single is not None:
        the_list += [single]
    return (the_list, comment)
|
def _enforceDataType ( self , data ) :
"""Converts to float so that this CTI always stores that type .
Replaces infinite with the maximum respresentable float .
Raises a ValueError if data is a NaN ."""
|
value = float ( data )
if math . isnan ( value ) :
raise ValueError ( "FloatCti can't store NaNs" )
if math . isinf ( value ) :
if value > 0 :
logger . warn ( "Replacing inf by the largest representable float" )
value = sys . float_info . max
else :
logger . warn ( "Replacing -inf by the smallest representable float" )
value = - sys . float_info . max
return value
|
def build_response(self, req, resp):
    """Build a :class:`Response <requests.Response>` from a urllib3 response.

    This should not be called from user code; it is only exposed for use
    when subclassing :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
    :param resp: The urllib3 response object.
    :rtype: requests.Response
    """
    response = Response()
    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(resp, 'status', None)
    # Make headers case-insensitive and derive the text encoding from them.
    response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    response.reason = response.raw.reason
    response.url = req.url.decode('utf-8') if isinstance(req.url, bytes) else req.url
    # Add new cookies from the server.
    extract_cookies_to_jar(response.cookies, req, resp)
    # Give the Response some context.
    response.request = req
    response.connection = self
    return response
|
def get_tree(cls, parent=None):
    """Return a queryset of nodes ordered as DFS, including the parent.

    If no parent is given, all trees are returned.
    """
    model = get_result_class(cls)
    if parent is None:
        # the entire forest
        return model.objects.all()
    if parent.is_leaf():
        return model.objects.filter(pk=parent.pk)
    subtree_range = (parent.lft, parent.rgt - 1)
    return model.objects.filter(tree_id=parent.tree_id, lft__range=subtree_range)
|
def iter_links(operations, page):
    """Generate a Link for each operation, carrying the page's query items."""
    for operation, ns, _rule, _func in operations:
        yield Link.for_(operation=operation, ns=ns,
                        type=ns.subject_name, qs=page.to_items())
|
def get_platforms(self, automation_api='all'):
    """List all OS and browser platforms currently supported on Sauce Labs.

    Returns a list of objects describing each supported platform.
    """
    endpoint = '/rest/v1/info/platforms/{}'.format(automation_api)
    return self.client.request('GET', endpoint)
|
def _renumber ( a : np . ndarray , keys : np . ndarray , values : np . ndarray ) -> np . ndarray :
"""Renumber ' a ' by replacing any occurrence of ' keys ' by the corresponding ' values '"""
|
ordering = np . argsort ( keys )
keys = keys [ ordering ]
values = keys [ ordering ]
index = np . digitize ( a . ravel ( ) , keys , right = True )
return ( values [ index ] . reshape ( a . shape ) )
|
async def disconnect_message(self, message, context):
    """Handle a disconnect message.

    See :meth:`AbstractDeviceAdapter.disconnect`.
    """
    conn = message.get('connection_string')
    client = context.user_data
    await self.disconnect(client, conn)
|
def login_user(user, remember=False, duration=None, force=False, fresh=True):
    '''Log a user in. Pass the actual user object. Inactive users
    (``is_active`` is ``False``) are refused unless ``force`` is ``True``.

    Returns ``True`` when the log in attempt succeeds, ``False`` when it
    fails (i.e. because the user is inactive).

    :param user: The user object to log in.
    :type user: object
    :param remember: Whether to remember the user after their session expires.
        Defaults to ``False``.
    :type remember: bool
    :param duration: The amount of time before the remember cookie expires. If
        ``None`` the value set in the settings is used. Defaults to ``None``.
    :type duration: :class:`datetime.timedelta`
    :param force: If the user is inactive, setting this to ``True`` will log
        them in regardless. Defaults to ``False``.
    :type force: bool
    :param fresh: setting this to ``False`` will log in the user with a session
        marked as not "fresh". Defaults to ``True``.
    :type fresh: bool
    '''
    if not force and not user.is_active:
        return False

    user_id = getattr(user, current_app.login_manager.id_attribute)()
    session['user_id'] = user_id
    session['_fresh'] = fresh
    session['_id'] = current_app.login_manager._session_identifier_generator()

    if remember:
        session['remember'] = 'set'
        if duration is not None:
            try:
                # equal to timedelta.total_seconds() but works with Python 2.6
                session['remember_seconds'] = (
                    duration.microseconds
                    + (duration.seconds + duration.days * 24 * 3600) * 10 ** 6
                ) / 10.0 ** 6
            except AttributeError:
                raise Exception('duration must be a datetime.timedelta, '
                                'instead got: {0}'.format(duration))

    current_app.login_manager._update_request_context_with_user(user)
    user_logged_in.send(current_app._get_current_object(), user=_get_user())
    return True
|
def get_pmids(self):
    """Return the de-duplicated list of PMIDs attached to the network's edges."""
    pmids = set()
    for attrs in self._edge_attributes.values():
        edge_pmids = attrs.get('pmids')
        if edge_pmids:
            pmids.update(edge_pmids)
    return list(pmids)
|
def wrap_list(item):
    """Coerce *item* into a list.

    Lists are returned unchanged; tuples and sets are converted to lists;
    ``None`` becomes the empty list; anything else is wrapped in a
    one-element list.
    """
    if isinstance(item, list):
        return item
    if isinstance(item, (tuple, set)):
        return list(item)
    return [] if item is None else [item]
|
def alterar(self, id_script_type, type, description):
    """Change the Script Type identified by *id_script_type*.

    :param id_script_type: Identifier of the Script Type. Integer value and greater than zero.
    :param type: Script Type type. String with a minimum 3 and maximum of 40 characters
    :param description: Script Type description. String with a minimum 3 and maximum of 100 characters

    :return: None

    :raise InvalidParameterError: The identifier of Script Type, type or description is null and invalid.
    :raise TipoRoteiroNaoExisteError: Script Type not registered.
    :raise NomeTipoRoteiroDuplicadoError: Type script already registered with informed.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    if not is_valid_int_param(id_script_type):
        raise InvalidParameterError(
            u'The identifier of Script Type is invalid or was not informed.')
    payload = {'type': type, 'description': description}
    url = 'scripttype/' + str(id_script_type) + '/'
    code, xml = self.submit({'script_type': payload}, 'PUT', url)
    return self.response(code, xml)
|
def punchFile(rh):
    """Punch a file to a virtual reader of the specified virtual machine.

    Input:
       Request Handle with the following properties:
          function       - 'CHANGEVM'
          subfunction    - 'PUNCHFILE'
          userid         - userid of the virtual machine
          parms['class'] - Spool class (optional)
          parms['file']  - Filespec of the file to punch.

    Output:
       Request Handle updated with the results.
       Return code - 0: ok, non-zero: error
    """
    rh.printSysLog("Enter changeVM.punchFile")
    # Spool class defaults to "A" unless the caller supplied one.
    spoolClass = str(rh.parms['class']) if 'class' in rh.parms else "A"
    punch2reader(rh, rh.userid, rh.parms['file'], spoolClass)
    rh.printSysLog("Exit changeVM.punchFile, rc: " + str(rh.results['overallRC']))
    return rh.results['overallRC']
|
def _create_default_config_file(self):
    """Create the config file with default values when it does not yet exist.

    Terminates the process via ``sys.exit`` after writing, informing the
    user that a fresh config file was created.
    """
    logger.info('Initialize Maya launcher, creating config file...\n')
    for section in (self.DEFAULTS, self.PATTERNS,
                    self.ENVIRONMENTS, self.EXECUTABLES):
        self.add_section(section)
    self.set(self.DEFAULTS, 'executable', None)
    self.set(self.DEFAULTS, 'environment', None)
    self.set(self.PATTERNS, 'exclude', ', '.join(self.EXLUDE_PATTERNS))
    self.set(self.PATTERNS, 'icon_ext', ', '.join(self.ICON_EXTENSIONS))
    self.config_file.parent.mkdir(exist_ok=True)
    self.config_file.touch()
    with self.config_file.open('wb') as f:
        self.write(f)
    # If this function is run inform the user that a new file has been
    # created (and stop).
    sys.exit('Maya launcher has successfully created config file at:\n'
             ' "{}"'.format(str(self.config_file)))
|
def set_flag_bit(self, x):
    """Helper used internally by set_flags (no multi-line lambdas in python :/).

    NOTE(review): despite the name, this *clears* bits 0x8 / 0x10 when the
    corresponding key types are present in ``self.keys`` — confirm against
    the protocol specification.

    :param x: flag byte, as bytes
    :return: updated flag byte, as bytes
    """
    flags = bytes_to_byte(x)
    if KeyTypes.COMM_ENC in self.keys:
        flags &= ~0x8
    if KeyTypes.APP_KEY in self.keys:
        flags &= ~0x10
    return byte_to_bytes(flags)
|
def calculate_crop_list(full_page_box_list, bounding_box_list, angle_list, page_nums_to_crop):
    """Given a list of full-page boxes (media boxes) and a list of tight
    bounding boxes for each page, calculate and return another list giving the
    list of bounding boxes to crop down to.  The parameter `angle_list` is
    a list of rotation angles which correspond to the pages.  The pages
    selected to crop are in the set `page_nums_to_crop`.

    NOTE(review): this function reads and mutates the module-level `args`
    options object (e.g. `args.evenodd` and `args.uniform` are reset before
    recursing for the '--evenodd' option) — confirm callers expect that.
    """
    # Definition: the deltas are the four differences, one for each margin,
    # between the original full page box and the final, cropped full-page box.
    # In the usual case where margin sizes decrease these are the same as the
    # four margin-reduction values (in absolute points).  The deltas are
    # usually positive but they can be negative due to either percentRetain>100
    # or a large enough absolute offset (in which case the size of the
    # corresponding margin will increase).  When percentRetain<0 the deltas are
    # always greater than the absolute difference between the full page and a
    # tight bounding box, and so part of the text within the tight bounding box
    # will also be cropped (unless absolute offsets are used to counter that).
    num_pages = len(bounding_box_list)
    page_range = range(num_pages)
    num_pages_to_crop = len(page_nums_to_crop)

    # Handle the '--samePageSize' option.
    # Note that this is always done first, even before evenodd is handled.  It
    # is only applied to the pages in the set `page_nums_to_crop`.
    order_n = 0
    if args.samePageSizeOrderStat:
        args.samePageSize = True
        # Clamp the order statistic into [0, num_pages_to_crop - 1].
        order_n = min(args.samePageSizeOrderStat[0], num_pages_to_crop - 1)
        order_n = max(order_n, 0)

    if args.samePageSize:
        if args.verbose:
            print("\nSetting each page size to the smallest box bounding all the pages.")
            if order_n != 0:
                print("But ignoring the largest {} pages in calculating each edge."
                      .format(order_n))
        same_size_bounding_box = [
            # We want the smallest of the left and bottom edges.
            sorted(full_page_box_list[pg][0] for pg in page_nums_to_crop),
            sorted(full_page_box_list[pg][1] for pg in page_nums_to_crop),
            # We want the largest of the right and top edges.
            sorted((full_page_box_list[pg][2] for pg in page_nums_to_crop), reverse=True),
            sorted((full_page_box_list[pg][3] for pg in page_nums_to_crop), reverse=True)]
        # Pick the order_n-th entry per edge (0 is the strict min/max).
        same_size_bounding_box = [sortlist[order_n] for sortlist in same_size_bounding_box]

        new_full_page_box_list = []
        for p_num, box in enumerate(full_page_box_list):
            if p_num not in page_nums_to_crop:
                new_full_page_box_list.append(box)
            else:
                new_full_page_box_list.append(same_size_bounding_box)
        full_page_box_list = new_full_page_box_list

    # Handle the '--evenodd' option if it was selected.
    if args.evenodd:
        even_page_nums_to_crop = {p_num for p_num in page_nums_to_crop if p_num % 2 == 0}
        odd_page_nums_to_crop = {p_num for p_num in page_nums_to_crop if p_num % 2 != 0}

        if args.uniform:
            uniform_set_with_even_odd = True
        else:
            uniform_set_with_even_odd = False

        # Recurse on even and odd pages, after resetting some options.
        if args.verbose:
            print("\nRecursively calculating crops for even and odd pages.")
        args.evenodd = False  # Avoid infinite recursion.
        args.uniform = True   # --evenodd implies uniform, just on each separate group
        even_crop_list = calculate_crop_list(full_page_box_list, bounding_box_list,
                                             angle_list, even_page_nums_to_crop)
        odd_crop_list = calculate_crop_list(full_page_box_list, bounding_box_list,
                                            angle_list, odd_page_nums_to_crop)

        # Recombine the even and odd pages.
        combine_even_odd = []
        for p_num in page_range:
            if p_num % 2 == 0:
                combine_even_odd.append(even_crop_list[p_num])
            else:
                combine_even_odd.append(odd_crop_list[p_num])

        # Handle the case where --uniform was set with --evenodd.
        if uniform_set_with_even_odd:
            min_bottom_margin = min(box[1] for p_num, box in enumerate(combine_even_odd)
                                    if p_num in page_nums_to_crop)
            max_top_margin = max(box[3] for p_num, box in enumerate(combine_even_odd)
                                 if p_num in page_nums_to_crop)
            combine_even_odd = [[box[0], min_bottom_margin, box[2], max_top_margin]
                                for box in combine_even_odd]
        return combine_even_odd

    # Before calculating the crops we modify the percentRetain and
    # absoluteOffset values for all the pages according to any specified.
    # rotations for the pages.  This is so, for example, uniform cropping is
    # relative to what the user actually sees.
    rotated_percent_retain = [mod_box_for_rotation(args.percentRetain, angle_list[m_val])
                              for m_val in range(num_pages)]
    rotated_absolute_offset = [mod_box_for_rotation(args.absoluteOffset, angle_list[m_val])
                               for m_val in range(num_pages)]

    # Calculate the list of deltas to be used to modify the original page
    # sizes.  Basically, a delta is the absolute diff between the full and
    # tight-bounding boxes, scaled according to the user's percentRetain, with
    # any absolute offset then added (lb) or subtracted (tr) as appropriate.
    #
    # The deltas are all positive unless absoluteOffset changes that or
    # percent>100.  They are added (lb) or subtracted (tr) as appropriate.
    delta_list = []
    for p_num, t_box, f_box in zip(list(range(len(full_page_box_list))),
                                   bounding_box_list, full_page_box_list):
        deltas = [abs(t_box[m_val] - f_box[m_val]) for m_val in range(4)]
        adj_deltas = [deltas[m_val] * (100.0 - rotated_percent_retain[p_num][m_val]) / 100.0
                      for m_val in range(4)]
        adj_deltas = [adj_deltas[m_val] + rotated_absolute_offset[p_num][m_val]
                      for m_val in range(4)]
        delta_list.append(adj_deltas)

    # Handle the '--uniform' options if one was selected.
    if args.uniformOrderPercent:
        percent_val = args.uniformOrderPercent[0]
        if percent_val < 0.0:
            percent_val = 0.0
        if percent_val > 100.0:
            percent_val = 100.0
        args.uniformOrderStat = [int(round(num_pages_to_crop * percent_val / 100.0))]

    if args.uniform or args.uniformOrderStat or args.uniformOrderStat4:
        if args.verbose:
            print("\nAll the selected pages will be uniformly cropped.")
        # Expand to tuples containing page nums, to better print verbose information.
        delta_list = [(delta_list[j], j + 1) for j in page_range]  # Note +1 added here.

        # Only look at the deltas which correspond to pages selected for cropping.
        # The values will then be sorted for each margin and selected.
        crop_delta_list = [delta_list[j] for j in page_range if j in page_nums_to_crop]

        # Handle order stats; m_vals are the four index values into the sorted
        # delta lists, one per margin.
        m_vals = [0, 0, 0, 0]
        if args.uniformOrderStat4:
            m_vals = args.uniformOrderStat4
        elif args.uniformOrderStat:
            m_vals = [args.uniformOrderStat[0]] * 4
        fixed_m_vals = []
        for m_val in m_vals:
            if m_val < 0 or m_val >= num_pages_to_crop:
                print("\nWarning: The selected order statistic is out of range.",
                      "Setting to closest value.", file=sys.stderr)
                if m_val >= num_pages_to_crop:
                    m_val = num_pages_to_crop - 1
                if m_val < 0:
                    m_val = 0
            fixed_m_vals.append(m_val)
        m_vals = fixed_m_vals

        if args.verbose and (args.uniformOrderStat or args.uniformOrderPercent
                             or args.uniformOrderStat4):
            print("\nPer-margin, the", m_vals,
                  "smallest delta values over the selected pages\nwill be ignored"
                  " when choosing common, uniform delta values.")

        # Get a sorted list of (delta, page_num) tuples for each margin.
        left_vals = sorted([(box[0][0], box[1]) for box in crop_delta_list])
        lower_vals = sorted([(box[0][1], box[1]) for box in crop_delta_list])
        right_vals = sorted([(box[0][2], box[1]) for box in crop_delta_list])
        upper_vals = sorted([(box[0][3], box[1]) for box in crop_delta_list])
        delta_list = [[left_vals[m_vals[0]][0], lower_vals[m_vals[1]][0],
                       right_vals[m_vals[2]][0], upper_vals[m_vals[3]][0]]] * num_pages

        if args.verbose:
            delta_page_nums = [left_vals[m_vals[0]][1], lower_vals[m_vals[1]][1],
                               right_vals[m_vals[2]][1], upper_vals[m_vals[3]][1]]
            print("\nThe smallest delta values actually used to set the uniform"
                  " cropping\namounts (ignoring any '-m' skips and pages in ranges"
                  " not cropped) were\nfound on these pages, numbered from 1:\n   ",
                  delta_page_nums)
            print("\nThe final delta values themselves are:\n   ", delta_list[0])

    # Apply the delta modifications to the full boxes to get the final sizes.
    final_crop_list = []
    for f_box, deltas in zip(full_page_box_list, delta_list):
        final_crop_list.append((f_box[0] + deltas[0], f_box[1] + deltas[1],
                                f_box[2] - deltas[2], f_box[3] - deltas[3]))

    # Set the page ratios if user chose that option.
    if args.setPageRatios:
        ratio = args.setPageRatios[0]
        if args.verbose:
            print("\nSetting all page width to height ratios to:", ratio)
        ratio_set_crop_list = []
        for left, bottom, right, top in final_crop_list:
            width = right - left
            horizontal_center = (right + left) / 2.0
            height = top - bottom
            vertical_center = (top + bottom) / 2.0
            new_height = width / ratio
            if new_height < height:
                # Grow the width symmetrically to achieve the ratio.
                new_width = height * ratio
                assert new_width >= width
                ratio_set_crop_list.append((horizontal_center - new_width / 2.0, bottom,
                                            horizontal_center + new_width / 2.0, top))
            else:
                # Grow the height symmetrically to achieve the ratio.
                ratio_set_crop_list.append((left, vertical_center - new_height / 2.0,
                                            right, vertical_center + new_height / 2.0))
        final_crop_list = ratio_set_crop_list

    return final_crop_list
|
def _quote(str, LegalChars=_LegalChars):
    r"""Quote a string for use in a cookie header.

    Return *str* unchanged when every character is legal in a cookie
    value; otherwise wrap it in double quotes, backslash-escaping the
    special characters via the module-level _Translator map.
    """
    for ch in str:
        if ch not in LegalChars:
            break
    else:
        # No illegal characters found (also covers the empty string).
        return str
    escaped = _nulljoin(_Translator.get(ch, ch) for ch in str)
    return '"' + escaped + '"'
|
def add_params(self, **kw):
    """Add [possibly many] parameters to the track.

    Parameters will be checked against known UCSC parameters and their
    supported formats.

    E.g.::

        add_params(color='128,0,0', visibility='dense')

    :raises ParameterError: if a key is neither a general nor a
        track-specific parameter.
    """
    for k, v in kw.items():
        # Reject unknown keys up front, before running any validators.
        if (k not in self.params) and (k not in self.specific_params):
            raise ParameterError('"%s" is not a valid parameter for %s' % (k, self.__class__.__name__))
        # Dispatch explicitly instead of try/except KeyError: the EAFP form
        # silently swallowed KeyErrors raised *inside* params[k].validate(v)
        # and then re-looked the key up in specific_params, producing a
        # confusing secondary KeyError.
        if k in self.params:
            self.params[k].validate(v)
        else:
            self.specific_params[k].validate(v)
    self._orig_kwargs.update(kw)
    self.kwargs = self._orig_kwargs.copy()
|
def createLabels2D(self):
    """2D labeling at zmax.

    Collapses the (pixel, distance) value grid to its per-pixel maximum
    slice, labels that 2D map, then broadcasts the labels back across
    every distance bin.
    """
    logger.debug(" Creating 2D labels...")
    self.zmax = np.argmax(self.values, axis=1)
    pixel_idx = np.arange(len(self.pixels), dtype=int)
    self.vmax = self.values[pixel_idx, self.zmax]
    labels, nlabels = CandidateSearch.labelHealpix(
        pixels=self.pixels,
        values=self.vmax,
        nside=self.nside,
        threshold=self.threshold,
        xsize=self.xsize,
    )
    self.nlabels = nlabels
    n_dist = len(self.distances)
    # Replicate each 2D label across all distance slices.
    self.labels = np.repeat(labels, n_dist).reshape(len(labels), n_dist)
    return self.labels, self.nlabels
|
def roles(self):
    """Access the roles

    :returns: twilio.rest.chat.v2.service.role.RoleList
    :rtype: twilio.rest.chat.v2.service.role.RoleList"""
    # Return the memoized list when it already exists.
    if self._roles is not None:
        return self._roles
    self._roles = RoleList(self._version, service_sid=self._solution['sid'], )
    return self._roles
|
def apply(self, cls, originalMemberNameList, classNamingConvention):
    """Forward the member installation to the wrapped delegate.

    :type cls: type
    :type originalMemberNameList: list(str)
    :type classNamingConvention: INamingConvention
    """
    delegate = self._memberDelegate
    delegate.apply(
        cls=cls,
        originalMemberNameList=originalMemberNameList,
        memberName=self._memberName,
        classNamingConvention=classNamingConvention,
        getter=self._makeGetter(),
        setter=self._makeSetter(),
    )
|
def get_ecdh_key(self, other):
    '''other --> Key25519 instance'''
    if self.__secretkey is None:
        raise KeyTypeError('Wrong key type for operation')
    shared = ffi.new(C_SHAREDKEY)
    # Work on a copy: curve25519_dh_CreateSharedKey mutates the secret key.
    secret_copy = ffi.new(C_SECRETKEY, self.secretkey)
    lib.curve25519_dh_CreateSharedKey(shared, other.pubkey, secret_copy)
    del secret_copy
    return ffi.get_bytes(shared)
|
def K(self, X, X2=None, presliced=False):
    """Compute the kernel matrix K(X, X2), or K(X, X) when X2 is None.

    Unless ``presliced`` is set, the inputs are first run through
    ``self._slice``.  The scaled squared distance
    r2 = ((x - x') / lengthscales)^2 is then handed to ``self.K_r2``,
    which subclasses may override directly, or indirectly via ``K_r``
    (which receives the square root of r2).
    """
    if presliced:
        sliced_X, sliced_X2 = X, X2
    else:
        sliced_X, sliced_X2 = self._slice(X, X2)
    r2 = self.scaled_square_dist(sliced_X, sliced_X2)
    return self.K_r2(r2)
|
def setTargets(self, targets):
    """Sets the targets.

    Rejects the value unless it passes ``verifyArguments`` or the
    network is patterned.
    """
    acceptable = self.verifyArguments(targets) or self.patterned
    if not acceptable:
        raise NetworkError('setTargets() requires [[...],[...],...] or [{"layerName": [...]}, ...].', targets)
    self.targets = targets
|
def open(self, number=0):
    """Open the FaderPort and register a callback so we can send and
    receive MIDI messages.

    :param number: 0 unless you've got more than one FaderPort attached.
                   In which case 0 is the first, 1 is the second etc.
                   I only have access to a single device so I can't
                   actually test this.
    """
    input_name = find_faderport_input_name(number)
    output_name = find_faderport_output_name(number)
    self.inport = mido.open_input(input_name)
    self.outport = mido.open_output(output_name)
    # NOTE(review): original comment called this "a reset message???" --
    # its exact purpose on the device is unconfirmed.
    self.outport.send(mido.Message.from_bytes([0x91, 0, 0x64]))
    time.sleep(0.01)
    self.inport.callback = self._message_callback
    self.on_open()
|
def list_agents(self):
    '''List agents hosted by the agency.'''
    table = text_helper.Table(fields=("Agent ID", "Agent class", "State"), lengths=(40, 25, 15))
    rows = (
        (agent._descriptor.doc_id, agent.log_category, agent._get_machine_state().name)
        for agent in self._agents
    )
    return table.render(rows)
|
def save_to_file(self, path):
    """Dump all cookies to file.

    Cookies are dumped as JSON-serialized dict of keys and values.

    :param path: destination file path (truncated/created for writing)
    """
    # json.dump streams straight to the file object -- idiomatic and avoids
    # building the full serialized string in memory first.
    with open(path, 'w') as out:
        json.dump(self.get_dict(), out)
|
def add_device_items(self, item, device):
    """Add the various items from the device to the node

    Dispatches on the item key: well-known keys copy into the node's
    properties, interface/port-style keys (matched by the module-level
    regexes) feed the interface/connection bookkeeping, and config keys
    schedule a startup-config file rename.  Branch order matters: the
    regex tests only run after the literal-key branches above them.

    :param str item: item key
    :param dict device: dictionary containing items
    """
    if item in ('aux', 'console'):
        # Plain property copy for aux/console ports.
        self.node['properties'][item] = device[item]
    elif item.startswith('slot'):
        # Dead branch kept from the original (c7200 slot0 special-case):
        # if self.device_info['model'] == 'c7200':
        #     if item != 'slot0':
        #         self.node['properties'][item] = device[item]
        # else:
        self.node['properties'][item] = device[item]
    elif item == 'connections':
        self.connections = device[item]
    elif INTERFACE_RE.search(item) or VBQ_INT_RE.search(item):
        # Interface key -> record a link from this interface to its peer.
        self.interfaces.append({'from': item, 'to': device[item]})
    elif NUMBER_RE.search(item):
        # Bare-number keys are switch ports; routing depends on device type.
        if self.device_info['type'] == 'EthernetSwitch':
            self.calc_ethsw_port(item, device[item])
        elif self.device_info['type'] == 'FrameRelaySwitch':
            self.calc_frsw_port(item, device[item])
    elif MAPINT_RE.search(item):
        self.add_mapping((item, device[item]))
    elif item == 'cnfg':
        # Re-home the startup config under configs/ and remember the
        # old->new path pair so the file can be copied later.
        new_config = os.path.join('configs', 'i%s_startup-config.cfg' % self.node['id'])
        self.node['properties']['startup_config'] = new_config
        self.config.append({'old': fix_path(device[item]), 'new': new_config})
    elif item.startswith('wic'):
        self.add_wic(item, device[item])
    elif item == 'symbol':
        self.set_symbol(device[item])
    elif item == 'nics':
        self.node['properties']['adapters'] = device[item]
    elif item == 'image':
        # NOTE(review): 'image' maps to the 'vmname' property here --
        # presumably a VM-backed device; confirm against callers.
        self.node['properties']['vmname'] = device[item]
    elif item == 'vbox_id' or item == 'qemu_id':
        # Hypervisor IDs live on the node itself, not under properties.
        self.node[item] = device[item]
|
def get_dataset_feature_statistics(builder, split):
    """Calculate statistics for the specified split.

    Iterates the whole split once, counting examples and per-feature
    presence, and tracking min/max for numeric features.  Then builds a
    Schema (name, shape, type per feature) and per-feature statistics
    (numeric min/max or byte stats) from what was observed.

    :param builder: object exposing ``as_dataset(split=...)`` whose dataset
        has ``output_shapes``/``output_types`` dicts (presumably a TFDS
        ``DatasetBuilder`` -- TODO confirm)
    :param split: split identifier forwarded to ``builder.as_dataset``
    :returns: ``(statistics, schema)`` -- a
        ``statistics_pb2.DatasetFeatureStatistics`` and a
        ``schema_pb2.Schema``
    """
    statistics = statistics_pb2.DatasetFeatureStatistics()
    # Make this to the best of our abilities.
    schema = schema_pb2.Schema()
    dataset = builder.as_dataset(split=split)
    # Just computing the number of examples for now.
    statistics.num_examples = 0
    # Feature dictionaries: presence count and numeric min/max per feature.
    feature_to_num_examples = collections.defaultdict(int)
    feature_to_min = {}
    feature_to_max = {}
    np_dataset = dataset_utils.as_numpy(dataset)
    # Single pass over all examples to gather counts and numeric ranges.
    for example in utils.tqdm(np_dataset, unit=" examples", leave=False):
        statistics.num_examples += 1
        assert isinstance(example, dict)
        feature_names = sorted(example.keys())
        for feature_name in feature_names:
            # Update the number of examples this feature appears in.
            feature_to_num_examples[feature_name] += 1
            feature_np = example[feature_name]
            # For compatibility in graph and eager mode, we can get PODs here
            # and everything may not be neatly wrapped up in numpy's ndarray.
            feature_dtype = type(feature_np)
            if isinstance(feature_np, np.ndarray):
                # If we have an empty array, then don't proceed further with
                # computing statistics on it.
                if feature_np.size == 0:
                    continue
                feature_dtype = feature_np.dtype.type
            feature_min, feature_max = None, None
            is_numeric = (np.issubdtype(feature_dtype, np.number) or feature_dtype == np.bool_)
            if is_numeric:
                feature_min = np.min(feature_np)
                feature_max = np.max(feature_np)
            # TODO(afrozm): What if shapes don't match? Populate ValueCount?
            # Add logic for that.
            # Set or update the min, max.
            if is_numeric:
                if ((feature_name not in feature_to_min) or (feature_to_min[feature_name] > feature_min)):
                    feature_to_min[feature_name] = feature_min
                if ((feature_name not in feature_to_max) or (feature_to_max[feature_name] < feature_max)):
                    feature_to_max[feature_name] = feature_max
    # Start here, we've processed all examples.
    output_shapes_dict = dataset.output_shapes
    output_types_dict = dataset.output_types
    for feature_name in sorted(feature_to_num_examples.keys()):
        # Try to fill in the schema.
        feature = schema.feature.add()
        feature.name = feature_name
        # TODO(afrozm): Make this work with nested structures, currently the
        # Schema proto has no support for it.
        maybe_feature_shape = output_shapes_dict[feature_name]
        if not isinstance(maybe_feature_shape, tf.TensorShape):
            logging.error("Statistics generation doesn't work for nested structures yet")
            continue
        for dim in maybe_feature_shape.as_list():
            # We denote `None`s as -1 in the shape proto.
            # NOTE(review): `dim if dim else -1` also maps a literal 0 to -1;
            # presumably dims are never 0 here -- confirm.
            feature.shape.dim.add().size = dim if dim else - 1
        feature_type = output_types_dict[feature_name]
        feature.type = _FEATURE_TYPE_MAP.get(feature_type, schema_pb2.BYTES)
        common_statistics = statistics_pb2.CommonStatistics()
        common_statistics.num_non_missing = feature_to_num_examples[feature_name]
        common_statistics.num_missing = (statistics.num_examples - common_statistics.num_non_missing)
        feature_name_statistics = statistics.features.add()
        feature_name_statistics.name = feature_name
        # TODO(afrozm): This can be skipped, since type information was added
        # to the Schema.
        feature_name_statistics.type = _SCHEMA_TYPE_MAP.get(feature.type, statistics_pb2.FeatureNameStatistics.BYTES)
        if feature.type == schema_pb2.INT or feature.type == schema_pb2.FLOAT:
            # Numeric features get min/max plus the common presence stats.
            numeric_statistics = statistics_pb2.NumericStatistics()
            numeric_statistics.min = feature_to_min[feature_name]
            numeric_statistics.max = feature_to_max[feature_name]
            numeric_statistics.common_stats.CopyFrom(common_statistics)
            feature_name_statistics.num_stats.CopyFrom(numeric_statistics)
        else:
            # Let's shove it into BytesStatistics for now.
            bytes_statistics = statistics_pb2.BytesStatistics()
            bytes_statistics.common_stats.CopyFrom(common_statistics)
            feature_name_statistics.bytes_stats.CopyFrom(bytes_statistics)
    return statistics, schema
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.