idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
38,500
def marginBuy ( self , currencyPair , rate , amount , lendingRate = None ) : return self . _private ( 'marginBuy' , currencyPair = currencyPair , rate = rate , amount = amount , lendingRate = lendingRate )
Places a margin buy order in a given market . Required POST parameters are currencyPair rate and amount . You may optionally specify a maximum lending rate using the lendingRate parameter . If successful the method will return the order number and any trades immediately resulting from your order .
38,501
def marginSell ( self , currencyPair , rate , amount , lendingRate = None ) : return self . _private ( 'marginSell' , currencyPair = currencyPair , rate = rate , amount = amount , lendingRate = lendingRate )
Places a margin sell order in a given market . Parameters and output are the same as for the marginBuy method .
38,502
def returnLendingHistory ( self , start = 0 , end = 2 ** 32 - 1 , limit = None ) : return self . _private ( 'returnLendingHistory' , start = start , end = end , limit = limit )
Returns your lending history within a time range specified by the start and end POST parameters as UNIX timestamps . limit may also be specified to limit the number of rows returned .
38,503
def generate_new_cid ( upstream_cid = None ) : if upstream_cid is None : return str ( uuid . uuid4 ( ) ) if getattr ( settings , 'CID_GENERATE' , False ) else None if ( getattr ( settings , 'CID_CONCATENATE_IDS' , False ) and getattr ( settings , 'CID_GENERATE' , False ) ) : return '%s, %s' % ( upstream_cid , str ( uuid . uuid4 ( ) ) ) return upstream_cid
Generate a new correlation id possibly based on the given one .
38,504
def check_ups_estimated_minutes_remaining ( the_session , the_helper , the_snmp_value ) : the_helper . add_metric ( label = the_helper . options . type , value = the_snmp_value , uom = "minutes" ) the_helper . set_summary ( "Remaining runtime on battery is {} minutes" . format ( the_snmp_value ) )
OID . 1 . 3 . 6 . 1 . 2 . 1 . 33 . 1 . 2 . 3 . 0 MIB excerpt An estimate of the time to battery charge depletion under the present load conditions if the utility power is off and remains off or if it were to be lost and remain off .
38,505
def check_ups_input_frequency ( the_session , the_helper , the_snmp_value ) : a_frequency = calc_frequency_from_snmpvalue ( the_snmp_value ) the_helper . add_metric ( label = the_helper . options . type , value = a_frequency , uom = 'Hz' ) the_helper . set_summary ( "Input Frequency is {} Hz" . format ( a_frequency ) )
OID . 1 . 3 . 6 . 1 . 2 . 1 . 33 . 1 . 3 . 3 . 1 . 2 . 1 MIB excerpt The present input frequency .
38,506
def check_ups_output_current ( the_session , the_helper , the_snmp_value ) : a_current = calc_output_current_from_snmpvalue ( the_snmp_value ) the_helper . add_metric ( label = the_helper . options . type , value = a_current , uom = 'A' ) the_helper . set_summary ( "Output Current is {} A" . format ( a_current ) )
OID . 1 . 3 . 6 . 1 . 2 . 1 . 33 . 1 . 4 . 4 . 1 . 3 . 1 MIB excerpt The present output current .
38,507
def check_ups_alarms_present ( the_session , the_helper , the_snmp_value ) : if the_snmp_value != '0' : the_helper . add_status ( pynag . Plugins . critical ) else : the_helper . add_status ( pynag . Plugins . ok ) the_helper . set_summary ( "{} active alarms " . format ( the_snmp_value ) )
OID . 1 . 3 . 6 . 1 . 2 . 1 . 33 . 1 . 6 . 1 . 0 MIB excerpt The present number of active alarm conditions .
38,508
def check_xups_bat_capacity ( the_session , the_helper , the_snmp_value ) : the_helper . add_metric ( label = the_helper . options . type , value = a_snmp_value , uom = '%' ) the_helper . set_summary ( "Remaining Battery Capacity {} %" . format ( the_snmp_value ) )
OID . 1 . 3 . 6 . 1 . 4 . 1 . 534 . 1 . 2 . 4 . 0 MIB Excerpt Battery percent charge .
38,509
def check_xups_env_ambient_temp ( the_session , the_helper , the_snmp_value , the_unit = 1 ) : the_helper . add_metric ( label = the_helper . options . type , value = the_snmp_value , uom = 'degree' ) the_helper . set_summary ( "Environment Temperature is {} degree" . format ( the_snmp_value ) )
OID . 1 . 3 . 6 . 1 . 4 . 1 . 534 . 1 . 6 . 1 . 0 MIB Excerpt The reading of the ambient temperature in the vicinity of the UPS or SNMP agent .
38,510
def check_ressources ( sess ) : cpu_value = get_data ( sess , cpu_oid , helper ) memory_value = get_data ( sess , memory_oid , helper ) filesystem_value = get_data ( sess , filesystem_oid , helper ) helper . add_summary ( "Controller Status" ) helper . add_long_output ( "Controller Ressources - CPU: %s%%" % cpu_value ) helper . add_metric ( "CPU" , cpu_value , "0:90" , "0:90" , "" , "" , "%%" ) if int ( cpu_value ) > 90 : helper . status ( critical ) helper . add_summary ( "Controller Ressources - CPU: %s%%" % cpu_value ) helper . add_long_output ( "Memory: %s%%" % memory_value ) helper . add_metric ( "Memory" , memory_value , "0:90" , "0:90" , "" , "" , "%%" ) if int ( memory_value ) > 90 : helper . add_summary ( "Memory: %s%%" % memory_value ) helper . status ( critical ) helper . add_long_output ( "Filesystem: %s%%" % filesystem_value ) helper . add_metric ( "Filesystem" , filesystem_value , "0:90" , "0:90" , "" , "" , "%%" ) if int ( filesystem_value ) > 90 : helper . add_summary ( "Filesystem: %s%%" % filesystem_value ) helper . status ( critical )
check the Ressources of the Fortinet Controller all thresholds are currently hard coded . should be fine .
38,511
def check_controller ( sess ) : controller_operational = get_data ( sess , operational_oid , helper ) controller_availability = get_data ( sess , availability_oid , helper ) controller_alarm = get_data ( sess , alarm_oid , helper ) helper . add_summary ( "Controller Status" ) helper . add_long_output ( "Controller Operational State: %s" % operational_states [ int ( controller_operational ) ] ) helper . add_long_output ( "Controller Availability State: %s" % availability_states [ int ( controller_availability ) ] ) helper . add_long_output ( "Controller Alarm State: %s" % alarm_states [ int ( controller_alarm ) ] ) if controller_operational != "1" and controller_operational != "4" : helper . status ( critical ) helper . add_summary ( "Controller Operational State: %s" % operational_states [ int ( controller_operational ) ] ) if controller_availability != "3" : helper . status ( critical ) helper . add_summary ( "Controller Availability State: %s" % availability_states [ int ( controller_availability ) ] ) if controller_alarm == "2" : helper . status ( warning ) helper . add_summary ( "Controller Alarm State: %s" % alarm_states [ int ( controller_alarm ) ] ) if controller_alarm == "3" or controller_alarm == "4" : helper . status ( critical ) helper . add_summary ( "Controller Alarm State: %s" % alarm_states [ int ( controller_alarm ) ] )
check the status of the controller
38,512
def check_accesspoints ( sess ) : ap_names = walk_data ( sess , name_ap_oid , helper ) [ 0 ] ap_operationals = walk_data ( sess , operational_ap_oid , helper ) [ 0 ] ap_availabilitys = walk_data ( sess , availability_ap_oid , helper ) [ 0 ] ap_alarms = walk_data ( sess , alarm_ap_oid , helper ) [ 0 ] helper . add_summary ( "Access Points Status" ) for x in range ( len ( ap_names ) ) : ap_name = ap_names [ x ] ap_operational = ap_operationals [ x ] ap_availability = ap_availabilitys [ x ] ap_alarm = ap_alarms [ x ] helper . add_long_output ( "%s - Operational: %s - Availabilty: %s - Alarm: %s" % ( ap_name , operational_states [ int ( ap_operational ) ] , availability_states [ int ( ap_availability ) ] , alarm_states [ int ( ap_alarm ) ] ) ) if ap_operational != "1" and ap_operational != "4" : helper . status ( critical ) helper . add_summary ( "%s Operational State: %s" % ( ap_name , operational_states [ int ( ap_operational ) ] ) ) if ap_availability != "3" : helper . status ( critical ) helper . add_summary ( "%s Availability State: %s" % ( ap_name , availability_states [ int ( ap_availability ) ] ) ) if ap_alarm == "2" : helper . status ( warning ) helper . add_summary ( "%s Controller Alarm State: %s" % ( ap_name , alarm_states [ int ( ap_alarm ) ] ) ) if ap_alarm == "3" or ap_alarm == "4" : helper . status ( critical ) helper . add_summary ( "%s Controller Alarm State: %s" % ( ap_name , alarm_states [ int ( ap_alarm ) ] ) )
check the status of all connected access points
38,513
def normal_check ( name , status , device_type ) : status_string = NORMAL_STATE . get ( int ( status ) , "unknown" ) if status_string == "ok" : return ok , "{} '{}': {}" . format ( device_type , name , status_string ) elif status_string == "unknown" : return unknown , "{} '{}': {}" . format ( device_type , name , status_string ) return critical , "{} '{}': {}" . format ( device_type , name , status_string )
if the status is ok in the NORMAL_STATE dict return ok + string if the status is not ok return critical + string
38,514
def probe_check ( name , status , device_type ) : status_string = PROBE_STATE . get ( int ( status ) , "unknown" ) if status_string == "ok" : return ok , "{} '{}': {}" . format ( device_type , name , status_string ) if status_string == "unknown" : return unknown , "{} '{}': {}" . format ( device_type , name , status_string ) return critical , "{} '{}': {}" . format ( device_type , name , status_string )
if the status is ok in the PROBE_STATE dict return ok + string if the status is not ok return critical + string
38,515
def add_device_information ( helper , session ) : host_name_data = helper . get_snmp_value ( session , helper , DEVICE_INFORMATION_OIDS [ 'oid_host_name' ] ) product_type_data = helper . get_snmp_value ( session , helper , DEVICE_INFORMATION_OIDS [ 'oid_product_type' ] ) service_tag_data = helper . get_snmp_value ( session , helper , DEVICE_INFORMATION_OIDS [ 'oid_service_tag' ] ) helper . add_summary ( 'Name: {} - Typ: {} - Service tag: {}' . format ( host_name_data , product_type_data , service_tag_data ) )
add general device information to summary
38,516
def process_status ( self , helper , session , check ) : snmp_result_status = helper . get_snmp_value ( session , helper , DEVICE_GLOBAL_OIDS [ 'oid_' + check ] ) if check == "system_lcd" : helper . update_status ( helper , normal_check ( "global" , snmp_result_status , "LCD status" ) ) elif check == "global_storage" : helper . update_status ( helper , normal_check ( "global" , snmp_result_status , "Storage status" ) ) elif check == "system_power" : helper . update_status ( helper , self . check_system_power_status ( snmp_result_status ) ) elif check == "global_system" : helper . update_status ( helper , normal_check ( "global" , snmp_result_status , "Device status" ) )
process a single status
38,517
def process_states ( self , helper , session , check ) : snmp_result_status = helper . walk_snmp_values ( session , helper , DEVICE_STATES_OIDS [ "oid_" + check ] , check ) snmp_result_names = helper . walk_snmp_values ( session , helper , DEVICE_NAMES_OIDS [ "oid_" + check ] , check ) for i , _result in enumerate ( snmp_result_status ) : if check == "power_unit" : helper . update_status ( helper , normal_check ( snmp_result_names [ i ] , snmp_result_status [ i ] , "Power unit" ) ) elif check == "drive" : helper . update_status ( helper , self . check_drives ( snmp_result_names [ i ] , snmp_result_status [ i ] ) ) elif check == "power_unit_redundancy" : helper . update_status ( helper , self . check_power_unit_redundancy ( snmp_result_names [ i ] , snmp_result_status [ i ] ) ) elif check == "chassis_intrusion" : helper . update_status ( helper , normal_check ( snmp_result_names [ i ] , snmp_result_status [ i ] , "Chassis intrusion sensor" ) ) elif check == "cooling_unit" : helper . update_status ( helper , normal_check ( snmp_result_names [ i ] , snmp_result_status [ i ] , "Cooling unit" ) )
process status values from a table
38,518
def process_temperature_sensors ( helper , session ) : snmp_result_temp_sensor_names = helper . walk_snmp_values ( session , helper , DEVICE_TEMPERATURE_OIDS [ 'oid_temperature_probe_location' ] , "temperature sensors" ) snmp_result_temp_sensor_states = helper . walk_snmp_values ( session , helper , DEVICE_TEMPERATURE_OIDS [ 'oid_temperature_probe_status' ] , "temperature sensors" ) snmp_result_temp_sensor_values = helper . walk_snmp_values ( session , helper , DEVICE_TEMPERATURE_OIDS [ 'oid_temperature_probe_reading' ] , "temperature sensors" ) for i , _result in enumerate ( snmp_result_temp_sensor_states ) : helper . update_status ( helper , probe_check ( snmp_result_temp_sensor_names [ i ] , snmp_result_temp_sensor_states [ i ] , "Temperature sensor" ) ) if i < len ( snmp_result_temp_sensor_values ) : helper . add_metric ( label = snmp_result_temp_sensor_names [ i ] + " -Celsius-" , value = float ( snmp_result_temp_sensor_values [ i ] ) / 10 )
process the temperature sensors
38,519
def check_drives ( drivename , drivestatus ) : return DISK_STATES [ int ( drivestatus ) ] [ "icingastatus" ] , "Drive '{}': {}" . format ( drivename , DISK_STATES [ int ( drivestatus ) ] [ "result" ] )
check the drive status
38,520
def check_power_unit_redundancy ( power_unit_name_data , power_unit_redundancy_data ) : return ( POWER_UNIT_REDUNDANCY_STATE [ int ( power_unit_redundancy_data ) ] [ "icingastatus" ] , "Power unit '{}' redundancy: {}" . format ( power_unit_name_data , POWER_UNIT_REDUNDANCY_STATE [ int ( power_unit_redundancy_data ) ] [ "result" ] ) )
check the status of the power units
38,521
def check_temperature_sensors ( ) : env_temp = walk_data ( sess , oid_env_temp , helper ) [ 0 ] env_temp_thresh = walk_data ( sess , oid_env_temp_thres , helper ) [ 0 ] env_temp_zipped = zip ( env_temp , env_temp_thresh ) for x , data in enumerate ( env_temp_zipped , 1 ) : if '-99' not in data and '0' not in data : if int ( data [ 0 ] ) > int ( data [ 1 ] ) : helper . add_summary ( 'Temperature at sensor %d above threshold (%s / %s)' % ( x , data [ 0 ] , data [ 1 ] ) ) helper . status ( critical ) helper . add_long_output ( 'Temperature %d: %s Celsius (threshold: %s Celsius)' % ( x , data [ 0 ] , data [ 1 ] ) ) if x == 1 : helper . add_metric ( "Environment Temperature" , data [ 0 ] , '' , ":" + data [ 1 ] , "" , "" , "Celsius" )
Check all temperature sensors of the server All sensors with the value or threshold is - 99 or 0 are ignored
38,522
def check_fan ( input_fan ) : fan_data = walk_data ( sess , oid_fan , helper ) [ 0 ] fan_count = 0 summary_output = '' long_output = '' for x , fan in enumerate ( fan_data , 1 ) : fan = int ( fan ) if normal_state [ fan ] == 'ok' : fan_count += 1 long_output += 'Fan %d: %s.\n' % ( x , normal_state [ fan ] ) if int ( fan_count ) != int ( input_fan ) : summary_output += '%s fan(s) expected - %s fan(s) ok. ' % ( input_fan , fan_count ) helper . status ( critical ) return ( summary_output , long_output )
check the fans
38,523
def get_snmp_from_host1 ( self ) : response = self . snmp1 . get_oids ( ps1_oid , ps2_oid , fan1_oid , fan2_oid , bat_oid , temp_oid , activity_oid , logfill_oid ) self . ps1_value = states [ int ( response [ 0 ] ) ] self . ps2_value = states [ int ( response [ 1 ] ) ] self . fan1_value = states [ int ( response [ 2 ] ) ] self . fan2_value = states [ int ( response [ 3 ] ) ] self . bat_value = states [ int ( response [ 4 ] ) ] self . temp_value = states [ int ( response [ 5 ] ) ] self . activity_value1 = activity [ int ( response [ 6 ] ) ] self . logfill_value = str ( response [ 7 ] )
Get SNMP values from 1st host .
38,524
def get_snmp_from_host2 ( self ) : if not self . snmp2 : self . activity_value2 = None else : response = self . snmp2 . get_oids ( activity_oid ) self . activity_value2 = activity [ int ( response [ 0 ] ) ]
Get SNMP values from 2nd host .
38,525
def check ( self ) : try : self . get_snmp_from_host1 ( ) self . get_snmp_from_host2 ( ) except ( health_monitoring_plugins . SnmpException , TypeError , KeyError ) : self . helper . status ( unknown ) self . helper . add_summary ( "SNMP response incomplete or invalid" ) return self . helper . add_summary ( "Filter Status" ) self . helper . add_long_output ( "Power Supply 1: %s" % self . ps1_value ) if self . ps1_value != "ok" : self . helper . status ( critical ) self . helper . add_summary ( "Power Supply 1: %s" % self . ps1_value ) self . helper . add_long_output ( "Power Supply 2: %s" % self . ps2_value ) if self . ps2_value != "ok" : self . helper . status ( critical ) self . helper . add_summary ( "Power Supply 2: %s" % self . ps2_value ) self . helper . add_long_output ( "Fan 1: %s" % self . fan1_value ) if self . fan1_value != "ok" : self . helper . status ( critical ) self . helper . add_summary ( "Fan 1: %s" % self . fan1_value ) self . helper . add_long_output ( "Fan 2: %s" % self . fan2_value ) if self . fan2_value != "ok" : self . helper . status ( critical ) self . helper . add_summary ( "Fan 2: %s" % self . fan2_value ) self . helper . add_long_output ( "Battery: %s" % self . bat_value ) if self . bat_value != "ok" : self . helper . status ( critical ) self . helper . add_summary ( "Battery: %s" % self . bat_value ) self . helper . add_long_output ( "Temperature: %s" % self . temp_value ) if self . temp_value != "ok" : self . helper . status ( critical ) self . helper . add_summary ( "Temperature: %s" % self . temp_value ) self . helper . add_metric ( label = 'logfill' , value = self . logfill_value , uom = "%%" ) self . helper . add_long_output ( "Fill Level internal log: %s%%" % self . logfill_value ) self . helper . add_long_output ( "Activity State: %s" % self . activity_value1 ) if self . activity_value1 == "error" : self . helper . status ( critical ) self . helper . add_summary ( "Activity State: %s" % self . activity_value1 ) if self . 
activity_value2 : self . helper . add_long_output ( "Activity State 2: %s" % self . activity_value2 ) if self . activity_value1 == "active" and self . activity_value2 == "active" : self . helper . status ( critical ) self . helper . add_summary ( "Filter 1 and Filter 2 active!" ) if self . activity_value1 == "standby" and self . activity_value2 == "standby" : self . helper . status ( critical ) self . helper . add_summary ( "Filter 1 and Filter 2 standby!" ) self . helper . check_all_metrics ( )
Evaluate health status from device parameters .
38,526
def update_status ( self , helper , status ) : if status : self . status ( status [ 0 ] ) if status [ 0 ] == 0 : self . add_long_output ( status [ 1 ] ) else : self . add_summary ( status [ 1 ] )
update the helper
38,527
def walk_oid ( self , oid ) : var = netsnmp . Varbind ( oid ) varlist = netsnmp . VarList ( var ) data = self . walk ( varlist ) if len ( data ) == 0 : raise SnmpException ( "SNMP walk response incomplete" ) return varlist
Get a list of SNMP varbinds in response to a walk for oid . Each varbind in response list has a tag iid val and type attribute .
38,528
def run_scan ( ) : all_disks = walk_data ( sess , oid_hrStorageDescr , helper ) [ 0 ] print "All available disks at: " + host for disk in all_disks : print "Disk: \t'" + disk + "'" quit ( )
show all available partitions
38,529
def check_partition ( ) : all_index = walk_data ( sess , oid_hrStorageIndex , helper ) [ 0 ] all_descriptions = walk_data ( sess , oid_hrStorageDescr , helper ) [ 0 ] sucess = False zipped = zip ( all_index , all_descriptions ) for partition in zipped : index = partition [ 0 ] description = partition [ 1 ] if partition_found ( disk , description ) : sucess = True unit = float ( get_data ( sess , oid_hrStorageAllocationUnits + "." + index , helper ) ) size = float ( get_data ( sess , oid_hrStorageSize + "." + index , helper ) ) used = float ( get_data ( sess , oid_hrStorageUsed + "." + index , helper ) ) if size == 0 or used == 0 : helper . exit ( summary = "Received value 0 as StorageSize or StorageUsed: calculation error" , exit_code = unknown , perfdata = '' ) used_result = convert_to_XX ( calculate_real_size ( used ) , unit , targetunit ) size_result = convert_to_XX ( calculate_real_size ( size ) , unit , targetunit ) percent_used = used_result / size_result * 100 used_string = str ( float ( "{0:.2f}" . format ( used_result ) ) ) size_string = str ( float ( "{0:.2f}" . format ( size_result ) ) ) percent_string = str ( float ( "{0:.2f}" . format ( percent_used ) ) ) if percent_used < 0 or percent_used > 100 : helper . exit ( summary = "Calculation error - second counter overrun?" , exit_code = unknown , perfdata = '' ) helper . add_summary ( "%s%% used (%s%s of %s%s) at '%s'" % ( percent_string , used_string , targetunit , size_string , targetunit , description ) ) helper . add_metric ( label = 'percent used' , value = percent_string , min = "0" , max = "100" , uom = "%" ) else : if not sucess : helper . exit ( summary = "Partition '%s' not found" % disk , exit_code = unknown , perfdata = '' )
check the defined partition
38,530
def check_sensors ( ) : all_sensors = walk_data ( sess , oid_description , helper ) [ 0 ] all_status = walk_data ( sess , oid_status , helper ) [ 0 ] zipped = zip ( all_sensors , all_status ) for sensor in zipped : description = sensor [ 0 ] status = sensor [ 1 ] try : status_string = senor_status_table [ status ] except KeyError : helper . exit ( summary = "received an undefined value from device: " + status , exit_code = unknown , perfdata = '' ) helper . add_summary ( "%s: %s" % ( description , status_string ) ) if status == "2" : helper . status ( critical ) if status == "3" : helper . status ( warning )
collect and check all available sensors
38,531
def check_runtime_remaining ( the_session , the_helper , the_snmp_value ) : a_minute_value = calc_minutes_from_ticks ( the_snmp_value ) the_helper . add_metric ( label = the_helper . options . type , value = a_minute_value , warn = the_helper . options . warning , crit = the_helper . options . critical , uom = "Minutes" ) the_helper . check_all_metrics ( ) the_helper . set_summary ( "Remaining runtime on battery is {} minutes" . format ( a_minute_value ) )
OID . 1 . 3 . 6 . 1 . 4 . 1 . 318 . 1 . 1 . 1 . 2 . 2 . 3 . 0 MIB excerpt The UPS battery run time remaining before battery exhaustion . SNMP value is in TimeTicks aka hundredths of a second
38,532
def check_typ ( helper , typ ) : if typ != "tcp" and typ != "udp" : helper . exit ( summary = "Type (-t) must be udp or tcp." , exit_code = unknown , perfdata = '' )
check if typ parameter is TCP or UDP
38,533
def check_port ( helper , port ) : try : int ( port ) except ValueError : helper . exit ( summary = "Port (-p) must be a integer value." , exit_code = unknown , perfdata = '' )
check if the port parameter is really a port or scan
38,534
def check_udp ( helper , host , port , session ) : open_ports = walk_data ( session , '.1.3.6.1.2.1.7.5.1.2' , helper ) [ 0 ] if scan : print "All open UDP ports at host " + host for port in open_ports : print "UDP: \t" + port quit ( ) if port in open_ports : udp_status = "OPEN" else : udp_status = "CLOSED" helper . status ( critical ) return ( "Current status for UDP port " + port + " is: " + udp_status )
the check logic for UDP ports
38,535
def check_tcp ( helper , host , port , warning_param , critical_param , session ) : tcp_translate = { "1" : "closed" , "2" : "listen" , "3" : "synSent" , "4" : "synReceived" , "5" : "established" , "6" : "finWait1" , "7" : "finWait2" , "8" : "closeWait" , "9" : "lastAck" , "10" : "closing" , "11" : "timeWait" , "12" : "deleteTCB" } open_ports = walk_data ( session , '.1.3.6.1.2.1.6.13.1.3' , helper ) [ 0 ] port_status = walk_data ( session , '.1.3.6.1.2.1.6.13.1.1' , helper ) [ 0 ] port_and_status = dict ( zip ( open_ports , port_status ) ) if scan : print "All open TCP ports: " + host for port in open_ports : tcp_status = port_and_status [ port ] tcp_status = tcp_translate [ tcp_status ] print "TCP: \t" + port + "\t Status: \t" + tcp_status quit ( ) if port in open_ports : tcp_status = port_and_status [ port ] tcp_status = tcp_translate [ tcp_status ] if tcp_status in warning_param : helper . status ( warning ) elif tcp_status in critical_param : helper . status ( critical ) else : helper . status ( ok ) else : tcp_status = "CLOSED" helper . status ( critical ) return ( "Current status for TCP port " + port + " is: " + tcp_status )
the check logic for check TCP ports
38,536
def to_json ( obj ) : i = StringIO . StringIO ( ) w = Writer ( i , encoding = 'UTF-8' ) w . write_value ( obj ) return i . getvalue ( )
Return a json string representing the python object obj .
38,537
def get_data ( self ) : "Get SNMP values from host" alarm_oids = [ netsnmp . Varbind ( alarms [ alarm_id ] [ 'oid' ] ) for alarm_id in self . models [ self . modem_type ] [ 'alarms' ] ] metric_oids = [ netsnmp . Varbind ( metrics [ metric_id ] [ 'oid' ] ) for metric_id in self . models [ self . modem_type ] [ 'metrics' ] ] response = self . snmp_session . get ( netsnmp . VarList ( * alarm_oids + metric_oids ) ) return ( response [ 0 : len ( alarm_oids ) ] , response [ len ( alarm_oids ) : ] )
Get SNMP values from host
38,538
def convert_in_oid ( service_name ) : s = service_name service_ascii = [ ord ( c ) for c in s ] length = str ( len ( s ) ) oid = base_oid + "." + length + "." + "." . join ( str ( x ) for x in service_ascii ) return oid
calculate the correct OID for the service name
38,539
def get_data ( self ) : "Return one SNMP response list for all status OIDs, and one list for all metric OIDs." alarm_oids = [ netsnmp . Varbind ( status_mib [ alarm_id ] [ 'oid' ] ) for alarm_id in self . models [ self . modem_type ] [ 'alarms' ] ] metric_oids = [ netsnmp . Varbind ( metric_mib [ metric_id ] [ 'oid' ] ) for metric_id in self . models [ self . modem_type ] [ 'metrics' ] ] response = self . snmp_session . get ( netsnmp . VarList ( * alarm_oids + metric_oids ) ) return ( response [ 0 : len ( alarm_oids ) ] , response [ len ( alarm_oids ) : ] )
Return one SNMP response list for all status OIDs and one list for all metric OIDs .
38,540
def check_inlet ( self , helper ) : try : inlet_values = self . sess . walk_oid ( self . oids [ 'oid_inlet_value' ] ) inlet_units = self . sess . walk_oid ( self . oids [ 'oid_inlet_unit' ] ) inlet_digits = self . sess . walk_oid ( self . oids [ 'oid_inlet_digits' ] ) inlet_states = self . sess . walk_oid ( self . oids [ 'oid_inlet_state' ] ) inlet_warning_uppers = self . sess . walk_oid ( self . oids [ 'oid_inlet_warning_upper' ] ) inlet_critical_uppers = self . sess . walk_oid ( self . oids [ 'oid_inlet_critical_upper' ] ) inlet_critical_lowers = self . sess . walk_oid ( self . oids [ 'oid_inlet_critical_lower' ] ) inlet_warning_lowers = self . sess . walk_oid ( self . oids [ 'oid_inlet_warning_lower' ] ) except health_monitoring_plugins . SnmpException as e : helper . exit ( summary = str ( e ) , exit_code = unknown , perfdata = '' ) helper . add_summary ( "Inlet" ) for x in range ( len ( inlet_values ) ) : inlet_unit = units [ int ( inlet_units [ x ] . val ) ] inlet_digit = inlet_digits [ x ] . val inlet_state = states [ int ( inlet_states [ x ] . val ) ] inlet_value = real_value ( inlet_values [ x ] . val , inlet_digit ) inlet_warning_upper = real_value ( inlet_warning_uppers [ x ] . val , inlet_digit ) inlet_critical_upper = real_value ( inlet_critical_uppers [ x ] . val , inlet_digit ) inlet_warning_lower = real_value ( inlet_warning_lowers [ x ] . val , inlet_digit ) inlet_critical_lower = real_value ( inlet_critical_lowers [ x ] . val , inlet_digit ) if inlet_state != "normal" : helper . add_summary ( "%s %s is %s" % ( inlet_value , inlet_unit , inlet_state ) ) helper . status ( critical ) helper . add_summary ( "%s %s" % ( inlet_value , inlet_unit ) ) helper . add_long_output ( "%s %s: %s" % ( inlet_value , inlet_unit , inlet_state ) ) helper . add_metric ( "Sensor " + str ( x ) + " -%s-" % inlet_unit , inlet_value , inlet_warning_lower + ":" + inlet_warning_upper , inlet_critical_lower + ":" + inlet_critical_upper , "" , "" , "" )
check the Inlets of Raritan PDUs
38,541
def check_outlet ( self , helper ) : try : outlet_name , outlet_state = self . sess . get_oids ( self . oids [ 'oid_outlet_name' ] , self . oids [ 'oid_outlet_state' ] ) except health_monitoring_plugins . SnmpException as e : helper . exit ( summary = str ( e ) , exit_code = unknown , perfdata = '' ) outlet_real_state = states [ int ( outlet_state ) ] if outlet_real_state != "on" : helper . status ( critical ) helper . add_summary ( "Outlet %s - '%s' is: %s" % ( self . number , outlet_name , outlet_real_state . upper ( ) ) )
check the status of the specified outlet
38,542
def check_sensor ( self , helper ) : try : sensor_name , sensor_state , sensor_type = self . sess . get_oids ( self . oids [ 'oid_sensor_name' ] , self . oids [ 'oid_sensor_state' ] , self . oids [ 'oid_sensor_type' ] ) except health_monitoring_plugins . SnmpException as e : helper . exit ( summary = str ( e ) , exit_code = unknown , perfdata = '' ) try : sensor_state_string = states [ int ( sensor_state ) ] except KeyError as e : helper . exit ( summary = "Invalid sensor response " + sensor_state , exit_code = unknown , perfdata = '' ) sensor_unit = "" sensor_unit_string = "" sensor_value = "" sensor_digit = "" real_sensor_value = "" sensor_warning_upper = "" sensor_critical_upper = "" sensor_warning_lower = "" sensor_critical_lower = "" if int ( sensor_type ) not in [ 14 , 16 , 17 , 18 , 19 , 20 ] : try : sensor_unit , sensor_digit , sensor_warning_upper , sensor_critical_upper , sensor_warning_lower , sensor_critical_lower , sensor_value = self . sess . get_oids ( self . oids [ 'oid_sensor_unit' ] , self . oids [ 'oid_sensor_digit' ] , self . oids [ 'oid_sensor_warning_upper' ] , self . oids [ 'oid_sensor_critical_upper' ] , self . oids [ 'oid_sensor_warning_lower' ] , self . oids [ 'oid_sensor_critical_lower' ] , self . oids [ 'oid_sensor_value' ] ) except health_monitoring_plugins . SnmpException as e : helper . exit ( summary = str ( e ) , exit_code = unknown , perfdata = '' ) sensor_unit_string = units [ int ( sensor_unit ) ] real_sensor_value = real_value ( int ( sensor_value ) , sensor_digit ) real_sensor_warning_upper = real_value ( sensor_warning_upper , sensor_digit ) real_sensor_critical_upper = real_value ( sensor_critical_upper , sensor_digit ) real_sensor_warning_lower = real_value ( sensor_warning_lower , sensor_digit ) real_sensor_critical_lower = real_value ( sensor_critical_lower , sensor_digit ) helper . 
add_metric ( sensor_name + " -%s- " % sensor_unit_string , real_sensor_value , real_sensor_warning_lower + ":" + real_sensor_warning_upper , real_sensor_critical_lower + ":" + real_sensor_critical_upper , "" , "" , "" ) if sensor_state_string in [ "closed" , "normal" , "on" , "notDetected" , "ok" , "yes" , "one" , "two" , "inSync" ] : helper . status ( ok ) elif sensor_state_string in [ "open" , "belowLowerWarning" , "aboveUpperWarning" , "marginal" , "standby" ] : helper . status ( warning ) elif sensor_state_string in [ "belowLowerCritical" , "aboveUpperCritical" , "off" , "detected" , "alarmed" , "fail" , "no" , "outOfSync" ] : helper . status ( critical ) elif sensor_state_string in [ "unavailable" ] : helper . status ( unknown ) else : helper . exit ( summary = "Something went wrong - received undefined state" , exit_code = unknown , perfdata = '' ) helper . add_summary ( "Sensor %s - '%s' %s%s is: %s" % ( self . number , sensor_name , real_sensor_value , sensor_unit_string , sensor_state_string ) )
check the status of the specified sensor
38,543
def process_gps_position ( self , helper , sess ) : gps_position = helper . get_snmp_value ( sess , helper , self . oids [ 'oid_gps_position' ] ) if gps_position : helper . add_summary ( gps_position ) else : helper . add_summary ( "Could not retrieve GPS position" ) helper . status ( unknown )
just print the current GPS position
38,544
def process_status(self, helper, sess, check):
    """Fetch the SNMP value for *check*, evaluate it and update the helper."""
    if check == 'ntp_current_state':
        raw = helper.get_snmp_value(sess, helper,
                                    self.oids['oid_ntp_current_state_int'])
        result = self.check_ntp_status(raw)
    elif check == 'gps_mode':
        raw = helper.get_snmp_value(sess, helper,
                                    self.oids['oid_gps_mode_int'])
        result = self.check_gps_status(raw)
    else:
        # Unknown check name: nothing to do.
        return
    helper.update_status(helper, result)
get the snmp value check the status and update the helper
38,545
def check_ntp_status(self, ntp_status_int):
    """Map the raw NTP state to a (status, message) pair, or None if healthy."""
    status = self.ntp_status.get(ntp_status_int, "unknown")
    if status == "unknown":
        return unknown, ("NTP status: " + status)
    if status not in ("synchronized", "normalOperationPPS"):
        return critical, ("NTP status: " + status)
    return None
check the NTP status
38,546
def check_gps_status(self, gps_status_int):
    """Map the raw GPS mode to a (status, message) pair, or None if healthy."""
    mode = self.gps_mode.get(gps_status_int, "unknown")
    if mode == "unknown":
        return unknown, ("GPS status: " + mode)
    if mode not in ("normalOperation", "gpsSync"):
        return warning, ("GPS status: " + mode)
    return None
check the GPS status
38,547
def process_satellites(self, helper, sess):
    """Report the number of good (usable) satellites as summary and metric."""
    good = helper.get_snmp_value(sess, helper,
                                 self.oids['oid_gps_satellites_good'])
    helper.add_summary("Good satellites: {}".format(good))
    helper.add_metric(label='satellites', value=good)
check and show the good satellites
38,548
def login_password(self, value):
    """Set the value of the login password field."""
    field = self.selenium.find_element(*self._password_input_locator)
    field.clear()
    field.send_keys(value)
Set the value of the login password field .
38,549
def email(self, value):
    """Set the value of the email field once it becomes visible."""
    field = self.wait.until(
        expected.visibility_of_element_located(self._email_input_locator))
    field.clear()
    field.send_keys(value)
Set the value of the email field .
38,550
def sign_in(self, email, password):
    """Sign in using the specified email address and password."""
    self.email = email
    self.login_password = password
    # Some flows show an intermediate "next" step before the sign-in button.
    if self.is_element_present(*self._next_button_locator):
        self.wait.until(
            expected.visibility_of_element_located(self._next_button_locator))
        self.click_next()
    self.click_sign_in()
Signs in using the specified email address and password .
38,551
def sign_in(self, email=None, password=None):
    """Sign in a user with the given credentials, or as a returning user."""
    from .pages.sign_in import SignIn
    page = SignIn(self.selenium, self.timeout)
    page.sign_in(email, password)
Signs in a user either with the specified email address and password or a returning user .
38,552
def write_csv(fileobj, rows, encoding=ENCODING, dialect=DIALECT):
    """Dump *rows* to *fileobj* with the given encoding and CSV dialect."""
    writer = csv.writer(fileobj, dialect=dialect)
    csv_writerows(writer, rows, encoding)
Dump rows to fileobj with the given encoding and CSV dialect .
38,553
def from_string(cls, link):
    """Return a new SheetUrl instance parsed from the URL string *link*.

    Raises ValueError when *link* does not match the expected pattern.
    """
    match = cls._pattern.search(link)
    if match is None:
        raise ValueError(link)
    return cls(match.group('id'))
Return a new SheetUrl instance from parsed URL string .
38,554
def doctemplate(*args):
    """Return a decorator interpolating *args* into the docstring of func."""
    def apply_doc(func):
        func.__doc__ = func.__doc__ % tuple(args)
        return func
    return apply_doc
Return a decorator putting args into the docstring of the decorated func .
38,555
def group_dict(items, keyfunc):
    """Return a defaultdict(list) grouping *items* by keyfunc(item)."""
    grouped = collections.defaultdict(list)
    for item in items:
        grouped[keyfunc(item)].append(item)
    return grouped
Return a list defaultdict with items grouped by keyfunc .
38,556
def uniqued(iterable):
    """Return a list of unique items from *iterable*, preserving order."""
    seen = set()
    result = []
    for item in iterable:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
Return unique list of iterable items preserving order .
38,557
def build_service(name=None, **kwargs):
    """Return a service endpoint for interacting with a Google API.

    When *name* is given, fill in missing kwargs from the SERVICES preset.
    """
    if name is not None:
        for key, value in iteritems(SERVICES[name]):
            kwargs.setdefault(key, value)
    return apiclient.discovery.build(**kwargs)
Return a service endpoint for interacting with a Google API .
38,558
def spreadsheet(service, id):
    """Fetch and return spreadsheet metadata with the Google Sheets API.

    Raises KeyError when the spreadsheet does not exist (HTTP 404).
    """
    request = service.spreadsheets().get(spreadsheetId=id)
    try:
        return request.execute()
    except apiclient.errors.HttpError as e:
        if e.resp.status == 404:
            raise KeyError(id)
        raise
Fetch and return spreadsheet meta data with Google sheets API .
38,559
def values(service, id, ranges):
    """Fetch and return spreadsheet cell values with the Google Sheets API."""
    params = {
        'majorDimension': 'ROWS',
        'valueRenderOption': 'UNFORMATTED_VALUE',
        'dateTimeRenderOption': 'FORMATTED_STRING',
        'spreadsheetId': id,
        'ranges': ranges,
    }
    response = service.spreadsheets().values().batchGet(**params).execute()
    return response['valueRanges']
Fetch and return spreadsheet cell values with Google sheets API .
38,560
def get_credentials(scopes=None, secrets=None, storage=None, no_webserver=False):
    """Make OAuth 2.0 credentials for *scopes* from secrets and storage files."""
    scopes = Scopes.get(scopes)
    if secrets is None:
        secrets = SECRETS
    if storage is None:
        storage = STORAGE
    secrets = os.path.expanduser(secrets)
    storage = os.path.expanduser(storage)
    store = file.Storage(storage)
    creds = store.get()
    if creds is None or creds.invalid:
        # No cached credentials: run the OAuth flow (optionally headless).
        flow = client.flow_from_clientsecrets(secrets, scopes)
        flag_args = ['--noauth_local_webserver'] if no_webserver else []
        flags = tools.argparser.parse_args(flag_args)
        creds = tools.run_flow(flow, store, flags)
    return creds
Make OAuth 2 . 0 credentials for scopes from secrets and storage files .
38,561
def get(cls, scope=None):
    """Return default or predefined URLs for a keyword; pass others through."""
    if scope is None:
        scope = cls.default
    if isinstance(scope, string_types) and scope in cls._keywords:
        return getattr(cls, scope)
    return scope
Return default or predefined URLs from keyword pass through scope .
38,562
def search_all(self, limit=50, format='json'):
    """Return a single list containing up to *limit* Result objects."""
    desired = limit
    results = self._search(limit, format)
    remaining = desired - len(results)
    while len(results) < desired:
        more = self._search(remaining, format)
        if not more:
            break
        results += more
        remaining -= len(more)
        time.sleep(1)  # be polite to the API between pages
    return results
Returns a single list containing up to limit Result objects
38,563
def _search(self, limit, format):
    """Query the Bing API once and return a list of VideoResult objects.

    Requests at most min(50, limit) results starting at the current
    offset, and advances ``self.current_offset`` by the number of
    results consumed.

    Raises PyBingVideoException on a non-JSON response unless
    ``self.safe`` is set; in safe mode the error is logged, the call
    sleeps 5 seconds and returns an empty list.  (The original fell
    through after the sleep and crashed with NameError on the unbound
    ``json_results``.)
    """
    url = self.QUERY_URL.format(
        requests.utils.quote("'{}'".format(self.query)),
        min(50, limit), self.current_offset, format)
    r = requests.get(url, auth=("", self.api_key))
    try:
        json_results = r.json()
    except ValueError:
        if not self.safe:
            raise PyBingVideoException(
                "Request returned with code %s, error msg: %s"
                % (r.status_code, r.text))
        print("[ERROR] Request returned with code %s, error msg: %s. \nContinuing in 5 seconds." % (r.status_code, r.text))
        time.sleep(5)
        # Fix: do not fall through to use the unbound json_results.
        return []
    packaged_results = [VideoResult(single_result_json)
                        for single_result_json in json_results['d']['results']]
    self.current_offset += min(50, limit, len(packaged_results))
    return packaged_results
Returns a list of result objects with the url for the next page bing search url .
38,564
def base26int(s, _start=1 - ord('A')):
    """Return string *s* interpreted as an int in bijective base-26."""
    total = 0
    for power, char in enumerate(reversed(s)):
        total += (_start + ord(char)) * 26 ** power
    return total
Return string s as int in bijective base26 notation .
38,565
def base26(x, _alphabet=string.ascii_uppercase):
    """Return positive int *x* as a string in bijective base-26 notation."""
    digits = []
    while x:
        x, rem = divmod(x, 26)
        if not rem:
            # Bijective numeration has no zero digit: borrow from x.
            x -= 1
            rem = 26
        digits.append(_alphabet[rem - 1])
    digits.reverse()
    return ''.join(digits)
Return positive int x as string in bijective base26 notation .
38,566
def _parse(coord, _match=_regex.match):
    """Return the match groups of a single sheet coordinate string.

    Raises ValueError when *coord* does not match the coordinate regex.
    """
    result = _match(coord)
    if result is None:
        raise ValueError(coord)
    return result.groups()
Return match groups from single sheet coordinate .
38,567
def _cint(col, _map={base26(i): i - 1 for i in range(1, 257)}):
    """Return the zero-based column index for a bijective base-26 string.

    Raises ValueError for strings outside the precomputed A..IV range.
    """
    key = col.upper()
    if key not in _map:
        raise ValueError(col)
    return _map[key]
Return zero - based column index from bijective base26 string .
38,568
def from_slice(cls, coord):
    """Return a value-fetching callable for a slice of coordinate strings."""
    if coord.step is not None:
        raise NotImplementedError('no slice step support')
    if coord.start is not None and coord.stop is not None:
        return DoubleSlice.from_slice(coord)
    if coord.start is not None:
        xcol, xrow, col, row = cls._parse(coord.start)
        if xcol is not None:
            return StartCell(cls._cint(xcol), cls._rint(xrow))
        if col is not None:
            return StartCol(cls._cint(col))
        return StartRow(cls._rint(row))
    if coord.stop is not None:
        # Stop bounds are exclusive, hence the +1 adjustments.
        xcol, xrow, col, row = cls._parse(coord.stop)
        if xcol is not None:
            return StopCell(cls._cint(xcol) + 1, cls._rint(xrow) + 1)
        if col is not None:
            return StopCol(cls._cint(col) + 1)
        return StopRow(cls._rint(row) + 1)
    return cls()
Return a value fetching callable given a slice of coordinate strings .
38,569
def find(self, title):
    """Return the first worksheet with the given title.

    Raises KeyError when no worksheet has that title.
    """
    try:
        return self._titles[title][0]
    except KeyError:
        raise KeyError(title)
Return the first worksheet with the given title .
38,570
def findall(self, title=None):
    """Return a list of worksheets with the given title (all when None)."""
    if title is None:
        return list(self._sheets)
    return list(self._titles.get(title, []))
Return a list of worksheets with the given title .
38,571
def to_csv(self, encoding=export.ENCODING, dialect=export.DIALECT,
           make_filename=export.MAKE_FILENAME):
    """Dump every worksheet of the spreadsheet to its own CSV file."""
    for sheet in self._sheets:
        sheet.to_csv(None, encoding, dialect, make_filename)
Dump all worksheets of the spreadsheet to individual CSV files .
38,572
def titles(self, unique=False):
    """Return a list of contained worksheet titles."""
    result = [sheet.title for sheet in self._items]
    if unique:
        return tools.uniqued(result)
    return result
Return a list of contained worksheet titles .
38,573
def at(self, row, col):
    """Return the value at the given (row, col) cell position.

    Raises TypeError unless both indexes are ints.
    """
    for index in (row, col):
        if not isinstance(index, int):
            raise TypeError(row, col)
    return self._values[row][col]
Return the value at the given cell position .
38,574
def values(self, column_major=False):
    """Return a nested list with the worksheet values (rows by default)."""
    if column_major:
        return [list(column) for column in zip(*self._values)]
    return [list(row) for row in self._values]
Return a nested list with the worksheet values .
38,575
def to_csv(self, filename=None, encoding=export.ENCODING,
           dialect=export.DIALECT, make_filename=export.MAKE_FILENAME):
    """Dump the worksheet to a CSV file (name derived when not given)."""
    if filename is None:
        if make_filename is None:
            make_filename = export.MAKE_FILENAME
        infos = {
            'id': self._spreadsheet._id,
            'title': self._spreadsheet._title,
            'sheet': self._title,
            'gid': self._id,
            'index': self._index,
            'dialect': dialect,
        }
        # make_filename may be a format string or a callable on infos.
        if isinstance(make_filename, string_types):
            filename = make_filename % infos
        else:
            filename = make_filename(infos)
    with export.open_csv(filename, 'w', encoding=encoding) as stream:
        export.write_csv(stream, self._values, encoding, dialect)
Dump the worksheet to a CSV file .
38,576
def to_frame(self, **kwargs):
    r"""Return a pandas DataFrame loaded from the worksheet data."""
    frame = export.write_dataframe(self._values, **kwargs)
    frame.name = self.title
    return frame
r Return a pandas DataFrame loaded from the worksheet data .
38,577
def from_files(cls, secrets=None, storage=None, scopes=None, no_webserver=False):
    """Return a spreadsheet collection, creating OAuth 2.0 credentials."""
    credentials = oauth2.get_credentials(scopes, secrets, storage, no_webserver)
    return cls(credentials)
Return a spreadsheet collection making OAauth 2 . 0 credentials .
38,578
def get(self, id_or_url, default=None):
    """Fetch the spreadsheet with the given id or URL, or *default*."""
    # A '/' marks a full URL rather than a bare spreadsheet id.
    if '/' in id_or_url:
        key = urls.SheetUrl.from_string(id_or_url).id
    else:
        key = id_or_url
    try:
        return self[key]
    except KeyError:
        return default
Fetch and return the spreadsheet with the given id or url .
38,579
def find(self, title):
    """Fetch and return the first spreadsheet with the given title.

    Raises KeyError when no spreadsheet has that title.
    """
    for id, _ in backend.iterfiles(self._drive, name=title):
        return self[id]
    raise KeyError(title)
Fetch and return the first spreadsheet with the given title .
38,580
def findall(self, title=None):
    """Fetch and return a list of spreadsheets with the given title."""
    if title is None:
        return list(self)
    files = backend.iterfiles(self._drive, name=title)
    return [self[id] for id, _ in files]
Fetch and return a list of spreadsheets with the given title .
38,581
def titles(self, unique=False):
    """Return a list of all available spreadsheet titles."""
    all_titles = [title for _, title in self.iterfiles()]
    if unique:
        return tools.uniqued(all_titles)
    return all_titles
Return a list of all available spreadsheet titles .
38,582
def create(self, name, description, data_source_type, url,
           credential_user=None, credential_pass=None, is_public=None,
           is_protected=None, s3_credentials=None):
    """Create a Data Source."""
    data = {
        'name': name,
        'description': description,
        'type': data_source_type,
        'url': url,
    }
    credentials = {}
    self._copy_if_defined(credentials, user=credential_user,
                          password=credential_pass)
    # Fall back to explicit S3 credentials when no user/password given.
    credentials = credentials or s3_credentials
    self._copy_if_defined(data, is_public=is_public,
                          is_protected=is_protected,
                          credentials=credentials)
    return self._create('/data-sources', data, 'data_source')
Create a Data Source .
38,583
def update(self, data_source_id, update_data):
    """Update a Data Source (PATCH on API v2+, PUT otherwise)."""
    do_update = self._patch if self.version >= 2 else self._update
    return do_update('/data-sources/%s' % data_source_id, update_data)
Update a Data Source .
38,584
def getitem_by_path(d, path):
    """Access the nested item in *d* addressed by the keys in *path*."""
    item = d
    for key in path:
        item = item[key]
    return item
Access item in d using path .
38,585
def clean_empty(self, d=DEFAULT):
    """Return a copy of *d* without empty leaves (0 is explicitly kept)."""
    if d is DEFAULT:
        d = self
    if isinstance(d, list):
        cleaned = (self.clean_empty(item) for item in d)
        return [item for item in cleaned if item or item == 0]
    if isinstance(d, type(self)):
        cleaned = ((k, self.clean_empty(v)) for k, v in d.items())
        return type(self)({k: v for k, v in cleaned if v or v == 0})
    if isinstance(d, dict):
        cleaned = ((k, self.clean_empty(v)) for k, v in d.items())
        return {k: v for k, v in cleaned if v or v == 0}
    return d
Returns a copy of d without empty leaves .
38,586
def compress(self, d=DEFAULT):
    """Return a copy of *d* with deduplicated (compressed) list leaves."""
    if d is DEFAULT:
        d = self
    if isinstance(d, list):
        compressed = [self.compress(item) for item in d]
        try:
            return list(set(compressed))
        except TypeError:
            # Unhashable items: deduplicate preserving first occurrence.
            deduped = []
            for item in compressed:
                if item not in deduped:
                    deduped.append(item)
            return deduped
    if isinstance(d, type(self)):
        return type(self)({k: self.compress(v) for k, v in d.items()})
    if isinstance(d, dict):
        return {k: self.compress(v) for k, v in d.items()}
    return d
Returns a copy of d with compressed leaves .
38,587
def cast_dicts(self, to=DEFAULT, d=DEFAULT):
    """Return a copy of *d* with all dicts cast to the type *to*."""
    if to is DEFAULT:
        to = type(self)
    if d is DEFAULT:
        d = self
    if isinstance(d, list):
        return [self.cast_dicts(to, item) for item in d]
    if isinstance(d, dict):
        return to({k: self.cast_dicts(to, v) for k, v in d.items()})
    return d
Returns a copy of d with all dicts casted to the type to .
38,588
def create(self, name, url, description=None, extra=None,
           is_public=None, is_protected=None):
    """Create a Job Binary."""
    data = {"name": name, "url": url}
    self._copy_if_defined(data, description=description, extra=extra,
                          is_public=is_public, is_protected=is_protected)
    return self._create('/job-binaries', data, 'job_binary')
Create a Job Binary .
38,589
def get_file(self, job_binary_id):
    """Download a Job Binary and return its raw content."""
    resp = self.api.get('/job-binaries/%s/data' % job_binary_id)
    if resp.status_code != 200:
        self._raise_api_exception(resp)
    return resp.content
Download a Job Binary .
38,590
def update(self, job_binary_id, data):
    """Update a Job Binary (PATCH on API v2+, PUT otherwise)."""
    do_update = self._patch if self.version >= 2 else self._update
    return do_update('/job-binaries/%s' % job_binary_id, data, 'job_binary')
Update Job Binary .
38,591
def set(conf):
    """Apply the *conf* mapping to the global Conf object, skipping None."""
    for name, value in conf.items():
        if value is not None:
            setattr(Conf, name.upper(), value)
Applies a configuration to the global config object
38,592
def get():
    """Return the configuration as a dict of non-callable public attributes."""
    return {
        attr: getattr(Conf, attr)
        for attr in dir(Conf())
        if not callable(getattr(Conf, attr)) and not attr.startswith("__")
    }
Gets the configuration as a dict
38,593
def load(description, add_arguments_cb=lambda x: None,
         postprocess_conf_cb=lambda x: None):
    """Load the global Conf object from command line arguments.

    Registers plugin entry points as '+name' arguments, parses
    Conf.ARGS (base32-encoding plugin argument strings so argparse
    does not choke on them), applies the result to Conf and
    instantiates the selected plugins.
    """
    argparser = ArgumentParser(description=description, prefix_chars='-+')
    argparser.add_argument('--version', dest='PRINT_VERSION',
                           action='store_true',
                           help='Print version and exit')
    add_arguments_cb(argparser)
    plugin_argparser = argparser.add_argument_group('Plugins')
    plugins = {}

    def load_plugin_group(group):
        for entry_point in iter_entry_points(group=group):
            name = str(entry_point).split(' =', 1)[0]
            plugin = entry_point.load()
            if isclass(plugin) and not plugin in Conf.SUPPORTED_PLUGIN_INTERFACES and any(
                    [issubclass(plugin, supported_plugin_interface)
                     for supported_plugin_interface in Conf.SUPPORTED_PLUGIN_INTERFACES]):
                plugin_argparser.add_argument(
                    '+{}'.format(name),
                    dest='PLUGIN_{}'.format(name),
                    type=str,
                    nargs='?',
                    default=DEFAULT,
                    metavar='args'.format(name),
                    help=make_argparse_help_safe(call_plugin(plugin, 'help')))
                plugins[name] = plugin
            else:
                warning('Plugin not supported: {}'.format(name))

    load_plugin_group(Conf.PLUGIN_GROUP_BASE)
    if Conf.LOAD_PLUGINS:
        load_plugin_group(Conf.PLUGIN_GROUP)

    # Base32-encode any value that follows a '+plugin' flag so argparse
    # treats it as an opaque token.
    conf = vars(argparser.parse_args([
        v if i == 0 or v[0] == '+' or Conf.ARGS[i - 1][0] != '+'
        else b32encode(v.encode()).decode()
        for i, v in enumerate(Conf.ARGS)
    ]))
    postprocess_conf_cb(conf)
    Conf.set(conf)

    if Conf.PRINT_VERSION:
        print('pdml2flow version {}'.format(Conf.VERSION), file=Conf.OUT)
        sys.exit(0)

    Conf.PLUGINS = []
    for conf_name, args in conf.items():
        if conf_name.startswith('PLUGIN_') and args != DEFAULT:
            plugin_name = conf_name[7:]
            Conf.PLUGINS.append(plugins[plugin_name](
                *split(b32decode(args.encode()).decode()
                       if args is not None else '')))
Loads the global Conf object from command line arguments .
38,594
def create(self, name, plugin_name, plugin_version, flavor_id,
           description=None, volumes_per_node=None, volumes_size=None,
           node_processes=None, node_configs=None, floating_ip_pool=None,
           security_groups=None, auto_security_group=None,
           availability_zone=None, volumes_availability_zone=None,
           volume_type=None, image_id=None, is_proxy_gateway=None,
           volume_local_to_instance=None, use_autoconfig=None, shares=None,
           is_public=None, is_protected=None, volume_mount_prefix=None,
           boot_from_volume=None, boot_volume_type=None,
           boot_volume_availability_zone=None,
           boot_volume_local_to_instance=None):
    """Create a Node Group Template."""
    data = {
        'name': name,
        'plugin_name': plugin_name,
        'plugin_version': plugin_version,
        'flavor_id': flavor_id,
        'node_processes': node_processes,
    }
    return self._do_create(
        data, description, volumes_per_node, volumes_size, node_configs,
        floating_ip_pool, security_groups, auto_security_group,
        availability_zone, volumes_availability_zone, volume_type, image_id,
        is_proxy_gateway, volume_local_to_instance, use_autoconfig, shares,
        is_public, is_protected, volume_mount_prefix, boot_from_volume,
        boot_volume_type, boot_volume_availability_zone,
        boot_volume_local_to_instance)
Create a Node Group Template .
38,595
def update_image(self, image_id, user_name, desc=None):
    """Create or update an Image in the Image Registry."""
    data = {
        "username": user_name,
        "description": desc if desc else '',
    }
    return self._post('/images/%s' % image_id, data)
Create or update an Image in Image Registry .
38,596
def update_tags(self, image_id, new_tags):
    """Update an Image's tags to exactly *new_tags*."""
    current = frozenset(self.get(image_id).tags)
    desired = frozenset(new_tags)
    to_add = list(desired - current)
    to_remove = list(current - desired)
    add_response = remove_response = None
    if to_add:
        add_response = self._post('/images/%s/tag' % image_id,
                                  {'tags': to_add}, 'image')
    if to_remove:
        remove_response = self._post('/images/%s/untag' % image_id,
                                     {'tags': to_remove}, 'image')
    # Prefer the most recent server response; re-fetch if nothing changed.
    return remove_response or add_response or self.get(image_id)
Update an Image tags .
38,597
def build_option_parser(parser):
    """Hook to add global data-processing options to *parser*."""
    parser.add_argument(
        "--os-data-processing-api-version",
        metavar="<data-processing-api-version>",
        default=utils.env('OS_DATA_PROCESSING_API_VERSION',
                          default=DEFAULT_DATA_PROCESSING_API_VERSION),
        help=("Data processing API version, default=" +
              DEFAULT_DATA_PROCESSING_API_VERSION +
              ' (Env: OS_DATA_PROCESSING_API_VERSION)'))
    parser.add_argument(
        "--os-data-processing-url",
        default=utils.env("OS_DATA_PROCESSING_URL"),
        help=("Data processing API URL, "
              "(Env: OS_DATA_PROCESSING_API_URL)"))
    return parser
Hook to add global options .
38,598
def create(self, name, plugin_name, hadoop_version, description=None,
           cluster_configs=None, node_groups=None, anti_affinity=None,
           net_id=None, default_image_id=None, use_autoconfig=None,
           shares=None, is_public=None, is_protected=None, domain_name=None):
    """Create a Cluster Template."""
    data = {
        'name': name,
        'plugin_name': plugin_name,
        'hadoop_version': hadoop_version,
    }
    return self._do_create(data, description, cluster_configs, node_groups,
                           anti_affinity, net_id, default_image_id,
                           use_autoconfig, shares, is_public, is_protected,
                           domain_name)
Create a Cluster Template .
38,599
def update(self, cluster_template_id, name=NotUpdated, plugin_name=NotUpdated,
           plugin_version=NotUpdated, description=NotUpdated,
           cluster_configs=NotUpdated, node_groups=NotUpdated,
           anti_affinity=NotUpdated, net_id=NotUpdated,
           default_image_id=NotUpdated, use_autoconfig=NotUpdated,
           shares=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated,
           domain_name=NotUpdated):
    """Update a Cluster Template with the fields that were actually given."""
    data = {}
    self._copy_if_updated(
        data, name=name, plugin_name=plugin_name,
        plugin_version=plugin_version, description=description,
        cluster_configs=cluster_configs, node_groups=node_groups,
        anti_affinity=anti_affinity, neutron_management_network=net_id,
        default_image_id=default_image_id, use_autoconfig=use_autoconfig,
        shares=shares, is_public=is_public, is_protected=is_protected,
        domain_name=domain_name)
    return self._patch('/cluster-templates/%s' % cluster_template_id,
                       data, 'cluster_template')
Update a Cluster Template .