idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
27,600
def metric_find(self):
    """Find the Metric by name.

    Queries /v2/customMetrics looking for a metric whose name matches
    ``self._metric_name``; on a match, stores its id on ``self._metric_id``.

    Returns:
        bool: True if the metric was found, False otherwise.
    """
    params = {'resultLimit': 50, 'resultStart': 0}
    while True:
        # NOTE(review): this breaks once resultStart >= resultLimit, i.e.
        # after the first page — the API's resultCount is never consulted,
        # so only the first 50 metrics are ever searched. Confirm intended.
        if params.get('resultStart') >= params.get('resultLimit'):
            break
        r = self.tcex.session.get('/v2/customMetrics', params=params)
        # bail out on non-2xx status or a non-JSON response body
        if not r.ok or 'application/json' not in r.headers.get('content-type', ''):
            self.tcex.handle_error(705, [r.status_code, r.text])
        data = r.json()
        for metric in data.get('data', {}).get('customMetricConfig'):
            if metric.get('name') == self._metric_name:
                self._metric_id = metric.get('id')
                info = 'found metric with name "{}" and Id {}.'
                self.tcex.log.info(info.format(self._metric_name, self._metric_id))
                return True
        params['resultStart'] += params.get('resultLimit')
    return False
Find the Metric by name .
259
7
27,601
def add(self, value, date=None, return_value=False, key=None):
    """Add metrics data to collection.

    Args:
        value: The metric value to record.
        date: Optional date for the value; normalized to ISO-8601 UTC format.
        return_value (bool): When True, request the updated value back from
            the API.
        key: Optional name for keyed metrics.

    Returns:
        dict: The API response body when one is returned (200 + JSON),
        otherwise an empty dict (the API answers 204 on plain adds).
    """
    data = {}
    if self._metric_id is None:
        # the metric must have been found/created before data can be added
        self.tcex.handle_error(715, [self._metric_name])
    body = {'value': value}
    if date is not None:
        body['date'] = self.tcex.utils.format_datetime(
            date, date_format='%Y-%m-%dT%H:%M:%SZ'
        )
    if key is not None:
        body['name'] = key
    self.tcex.log.debug('metric data: {}'.format(body))
    params = {}
    if return_value:
        params = {'returnValue': 'true'}
    url = '/v2/customMetrics/{}/data'.format(self._metric_id)
    r = self.tcex.session.post(url, json=body, params=params)
    if r.status_code == 200 and 'application/json' in r.headers.get('content-type', ''):
        data = r.json()
    elif r.status_code == 204:
        # success with no response body (returnValue not requested)
        pass
    else:
        self.tcex.handle_error(710, [r.status_code, r.text])
    return data
Add metrics data to collection .
306
6
27,602
def add_keyed(self, value, key, date=None, return_value=False):
    """Add keyed metrics data to collection.

    Thin convenience wrapper that forwards to ``add`` with the key set.
    """
    return self.add(value, date=date, return_value=return_value, key=key)
Add keyed metrics data to collection .
38
8
27,603
def hget(self, key):
    """Read data from Redis for the provided key.

    Args:
        key (str): The field name within the hash.

    Returns:
        str|None: The stored value decoded to ``str`` (Redis returns bytes
        under Python 3), or None when the field does not exist.
    """
    data = self.r.hget(self.hash, key)
    # decode the value already fetched instead of issuing a second
    # round-trip to Redis (the original re-queried the same field)
    if data is not None and not isinstance(data, str):
        data = str(data, 'utf-8')
    return data
Read data from Redis for the provided key .
65
10
27,604
def _load_secure_params ( self ) : self . tcex . log . info ( 'Loading secure params.' ) # Retrieve secure params and inject them into sys.argv r = self . tcex . session . get ( '/internal/job/execution/parameters' ) # check for bad status code and response that is not JSON if not r . ok or r . headers . get ( 'content-type' ) != 'application/json' : err = r . text or r . reason self . tcex . exit ( 1 , 'Error retrieving secure params from API ({}).' . format ( err ) ) # return secure params return r . json ( ) . get ( 'inputs' , { } )
Load secure params from the API .
158
7
27,605
def _results_tc_args ( self ) : results = [ ] if os . access ( self . default_args . tc_out_path , os . W_OK ) : result_file = '{}/results.tc' . format ( self . default_args . tc_out_path ) else : result_file = 'results.tc' if os . path . isfile ( result_file ) : with open ( result_file , 'r' ) as rh : results = rh . read ( ) . strip ( ) . split ( '\n' ) os . remove ( result_file ) for line in results : if not line or ' = ' not in line : continue key , value = line . split ( ' = ' ) if value == 'true' : value = True elif value == 'false' : value = False elif not value : value = None setattr ( self . _default_args , key , value )
Read data from results_tc file from previous run of app .
203
13
27,606
def _unknown_args ( self , args ) : for u in args : self . tcex . log . warning ( u'Unsupported arg found ({}).' . format ( u ) )
Log argparser unknown arguments .
42
6
27,607
def args_update(self):
    """Update the argparser namespace with any data from configuration file."""
    for name, val in self._config_data.items():
        setattr(self._default_args, name, val)
Update the argparser namespace with any data from configuration file .
38
12
27,608
def config_file(self, filename):
    """Load configuration data from provided file.

    Parsed JSON is stored on ``self._config_data``; a missing file is
    logged as an error and leaves the configuration untouched.
    """
    if not os.path.isfile(filename):
        self.tcex.log.error('Could not load configuration file "{}".'.format(filename))
        return
    with open(filename, 'r') as fh:
        self._config_data = json.load(fh)
Load configuration data from provided file and inject values into sys . argv .
76
15
27,609
def default_args(self):
    """Parse args and return default args.

    On first call, parses known CLI args, reinitializes the logger, and
    injects parameters from AOT (blocking pop) or the secure-params API
    when the corresponding flags are set. Subsequent calls return the
    cached namespace.
    """
    if self._default_args is None:
        self._default_args, unknown = self.parser.parse_known_args()  # pylint: disable=W0612
        # reinitialize logger with new log level and api settings
        self.tcex._logger()
        if self._default_args.tc_aot_enabled:
            # block for AOT message and get params
            params = self.tcex.playbook.aot_blpop()
            self.inject_params(params)
        elif self._default_args.tc_secure_params:
            # inject secure params from API
            params = self._load_secure_params()
            self.inject_params(params)
    return self._default_args
Parse args and return default args .
168
8
27,610
def inject_params(self, params):
    """Inject params into sys.argv from secureParams API, AOT, or user provided.

    Args:
        params (dict): Mapping of arg name to value. Values are coerced
            based on the install.json type (MultiChoice -> list, Boolean ->
            bool) before being appended to sys.argv; the args are then
            re-parsed and the logger reinitialized.
    """
    for arg, value in params.items():
        cli_arg = '--{}'.format(arg)
        if cli_arg in sys.argv:
            # arg already passed on the command line
            self.tcex.log.debug('skipping existing arg: {}'.format(cli_arg))
            continue

        # ThreatConnect secure/AOT params should be updated in the future to proper JSON format.
        # MultiChoice data should be represented as JSON array and Boolean values should be a
        # JSON boolean and not a string.
        param_data = self.tcex.install_json_params.get(arg) or {}
        if param_data.get('type', '').lower() == 'multichoice':
            # update "|" delimited value to a proper array for params that have type of
            # MultiChoice.
            value = value.split('|')
        elif param_data.get('type', '').lower() == 'boolean':
            # update value to be a boolean instead of string "true"/"false".
            value = self.tcex.utils.to_bool(value)
        elif arg in self.tc_bool_args:
            value = self.tcex.utils.to_bool(value)

        if isinstance(value, (bool)):
            # handle bool values as flags (e.g., --flag) with no value
            if value is True:
                sys.argv.append(cli_arg)
        elif isinstance(value, (list)):
            for mcv in value:
                sys.argv.append('{}={}'.format(cli_arg, mcv))
        else:
            sys.argv.append('{}={}'.format(cli_arg, value))

    # reset default_args now that values have been injected into sys.argv
    self._default_args, unknown = self.parser.parse_known_args()  # pylint: disable=W0612

    # reinitialize logger with new log level and api settings
    self.tcex._logger()
Inject params into sys . argv from secureParams API AOT or user provided .
474
19
27,611
def many(self):
    """Yield all of the owners available."""
    for owner in self.tc_requests.many(self.api_type, None, self.api_entity):
        yield owner
Gets all of the owners available .
35
8
27,612
def dns_resolution(self):
    """Updates the Host DNS resolution.

    Raises a handled error (910) when the object cannot be updated.
    """
    if not self.can_update():
        # object is not in an updatable state
        self._tcex.handle_error(910, [self.type])

    request_args = (self.api_type, self.api_sub_type, self.unique_id)
    return self.tc_requests.dns_resolution(*request_args, owner=self.owner)
Updates the Host DNS resolution
78
6
27,613
def _create_tcex_dirs ( ) : dirs = [ 'tcex.d' , 'tcex.d/data' , 'tcex.d/profiles' ] for d in dirs : if not os . path . isdir ( d ) : os . makedirs ( d )
Create tcex . d directory and sub directories .
72
11
27,614
def expand_valid_values(valid_values):
    """Expand supported playbook variables to their full list.

    Replaces the magic placeholders ``${GROUP_TYPES}``, ``${OWNERS}`` and
    ``${USERS}`` in place. Each placeholder is handled independently (the
    previous elif-chain expanded only the first placeholder found, leaving
    any others literal in the list).

    Args:
        valid_values (list): The validValues list from install.json.

    Returns:
        list: The same list, mutated with placeholders expanded.
    """
    if '${GROUP_TYPES}' in valid_values:
        valid_values.remove('${GROUP_TYPES}')
        valid_values.extend(
            [
                'Adversary',
                'Campaign',
                'Document',
                'Email',
                'Event',
                'Incident',
                'Intrusion Set',
                'Signature',
                'Task',
                'Threat',
            ]
        )
    if '${OWNERS}' in valid_values:
        valid_values.remove('${OWNERS}')
        valid_values.append('')
    if '${USERS}' in valid_values:
        valid_values.remove('${USERS}')
        valid_values.append('')
    return valid_values
Expand supported playbook variables to their full list .
172
10
27,615
def gen_permutations(self, index=0, args=None):
    """Iterate recursively over layout.json parameter names.

    Walks the layout.json inputs depth-first, branching on every value of
    Boolean and Choice params whose display condition passes, and records
    each complete arg combination in ``self._input_permutations`` (and the
    matching output variables in ``self._output_permutations``) once the
    name list is exhausted.

    Args:
        index (int): Position in ``self.layout_json_names`` to process.
        args (list): Accumulated args for the current recursion branch.
    """
    if args is None:
        args = []
    try:
        name = self.layout_json_names[index]
        display = self.layout_json_params.get(name, {}).get('display')
        input_type = self.install_json_params().get(name, {}).get('type')
        if self.validate_layout_display(self.input_table, display):
            if input_type.lower() == 'boolean':
                for val in [True, False]:
                    args.append({'name': name, 'value': val})
                    self.db_update_record(self.input_table, name, val)
                    self.gen_permutations(index + 1, list(args))
                    # remove the previous arg before next iteration
                    args.pop()
            elif input_type.lower() == 'choice':
                valid_values = self.expand_valid_values(
                    self.install_json_params().get(name, {}).get('validValues', [])
                )
                for val in valid_values:
                    args.append({'name': name, 'value': val})
                    self.db_update_record(self.input_table, name, val)
                    self.gen_permutations(index + 1, list(args))
                    # remove the previous arg before next iteration
                    args.pop()
            else:
                # non-branching input: record a placeholder value
                args.append({'name': name, 'value': None})
                self.gen_permutations(index + 1, list(args))
        else:
            # display condition failed: skip this input entirely
            self.gen_permutations(index + 1, list(args))
    except IndexError:
        # when IndexError is reached all data has been processed.
        self._input_permutations.append(args)
        outputs = []
        for o_name in self.install_json_output_variables():
            if self.layout_json_outputs.get(o_name) is not None:
                display = self.layout_json_outputs.get(o_name, {}).get('display')
                valid = self.validate_layout_display(self.input_table, display)
                if display is None or not valid:
                    continue
            for ov in self.install_json_output_variables().get(o_name):
                outputs.append(ov)
        self._output_permutations.append(outputs)
Iterate recursively over layout . json parameter names .
544
12
27,616
def load_profiles(self):
    """Load all profile configuration data.

    Requires tcex.json to exist; migrates any legacy profiles embedded in
    tcex.json out to individual files, ensures the profile include
    directory is registered, and then loads profiles from every include
    directory.
    """
    if not os.path.isfile('tcex.json'):
        msg = 'The tcex.json config file is required.'
        sys.exit(msg)

    # create default directories
    self._create_tcex_dirs()

    # open tcex.json configuration file
    with open('tcex.json', 'r+') as fh:
        data = json.load(fh)
        if data.get('profiles') is not None:
            # no longer supporting profiles in tcex.json
            print(
                '{}{}Migrating profiles from tcex.json to individual files.'.format(
                    c.Style.BRIGHT, c.Fore.YELLOW
                )
            )
            for profile in data.get('profiles') or []:
                outfile = '{}.json'.format(
                    profile.get('profile_name').replace(' ', '_').lower()
                )
                self.profile_write(profile, outfile)

            # remove legacy profile key
            del data['profiles']
        data.setdefault('profile_include_dirs', [])
        if self.profile_dir not in data.get('profile_include_dirs'):
            data['profile_include_dirs'].append(self.profile_dir)
        fh.seek(0)
        fh.write(json.dumps(data, indent=2, sort_keys=True))
        fh.truncate()

    # load includes
    for directory in data.get('profile_include_dirs') or []:
        self.load_profile_include(directory)
Return configuration data .
370
4
27,617
def load_profiles_from_file(self, fqfn):
    """Load profiles from file.

    Each profile in the file is migrated to the current schema (the file
    is rewritten in place), optionally validated, and registered in
    ``self.profiles`` keyed by profile name; duplicate names are an error.

    Args:
        fqfn (str): Fully qualified file name of the profiles JSON file.
    """
    if self.args.verbose:
        print(
            'Loading profiles from File: {}{}{}'.format(
                c.Style.BRIGHT, c.Fore.MAGENTA, fqfn
            )
        )
    with open(fqfn, 'r+') as fh:
        data = json.load(fh)
        for profile in data:
            # force update old profiles
            self.profile_update(profile)
            if self.args.action == 'validate':
                self.validate(profile)
        fh.seek(0)
        fh.write(json.dumps(data, indent=2, sort_keys=True))
        fh.truncate()

    for d in data:
        if d.get('profile_name') in self.profiles:
            self.handle_error(
                'Found a duplicate profile name ({}).'.format(d.get('profile_name'))
            )
        self.profiles.setdefault(
            d.get('profile_name'),
            {'data': d, 'ij_filename': d.get('install_json'), 'fqfn': fqfn},
        )
Load profiles from file .
264
5
27,618
def print_permutations(self):
    """Print all valid permutations.

    Writes every generated input permutation, tagged with its index, to
    the permutations.json file in the current directory.
    """
    # use enumerate rather than a manually-maintained index counter
    permutations = [
        {'index': index, 'args': permutation}
        for index, permutation in enumerate(self._input_permutations)
    ]
    with open('permutations.json', 'w') as fh:
        json.dump(permutations, fh, indent=2)
    print('All permutations written to the "permutations.json" file.')
Print all valid permutations .
100
6
27,619
def profile_create(self):
    """Create a profile.

    Builds the OrderedDict profile structure (optional/required app args,
    default args, and — for Playbook Apps — validation rules) seeded with
    defaults from install.json.

    Returns:
        OrderedDict: The new profile definition.
    """
    if self.args.profile_name in self.profiles:
        self.handle_error('Profile "{}" already exists.'.format(self.args.profile_name))

    # load the install.json file defined as a arg (default: install.json)
    ij = self.load_install_json(self.args.ij)

    print(
        'Building Profile: {}{}{}'.format(
            c.Style.BRIGHT, c.Fore.CYAN, self.args.profile_name
        )
    )
    profile = OrderedDict()
    profile['args'] = {}
    profile['args']['app'] = {}
    profile['args']['app']['optional'] = self.profile_settings_args(ij, False)
    profile['args']['app']['required'] = self.profile_settings_args(ij, True)
    profile['args']['default'] = self.profile_setting_default_args(ij)
    profile['autoclear'] = True
    profile['clear'] = []
    profile['description'] = ''
    profile['data_files'] = []
    profile['exit_codes'] = [0]
    profile['groups'] = [os.environ.get('TCEX_GROUP', 'qa-build')]
    profile['install_json'] = self.args.ij
    profile['profile_name'] = self.args.profile_name
    profile['quiet'] = False

    if ij.get('runtimeLevel') == 'Playbook':
        # Playbook Apps additionally get validation rules and the full
        # output-variable list wired into the default args
        validations = self.profile_settings_validations
        profile['validations'] = validations.get('rules')
        profile['args']['default']['tc_playbook_out_variables'] = '{}'.format(
            ','.join(validations.get('outputs'))
        )
    return profile
Create a profile .
449
4
27,620
def profile_delete(self):
    """Delete an existing profile.

    Removes the selected profile from its backing JSON file, rewriting the
    file in place; the file itself is deleted when it no longer contains
    any profiles.
    """
    self.validate_profile_exists()

    profile_data = self.profiles.get(self.args.profile_name)
    fqfn = profile_data.get('fqfn')
    with open(fqfn, 'r+') as fh:
        data = json.load(fh)
        for profile in data:
            # NOTE(review): removing from "data" while iterating it; safe
            # only if at most one entry matches — confirm names are unique
            # per file before refactoring
            if profile.get('profile_name') == self.args.profile_name:
                data.remove(profile)
        fh.seek(0)
        fh.write(json.dumps(data, indent=2, sort_keys=True))
        fh.truncate()

    if not data:
        # remove empty file
        os.remove(fqfn)
Delete an existing profile .
160
5
27,621
def profile_settings_args(self, ij, required):
    """Return args based on install.json or layout.json params.

    When a permutation id was supplied on the CLI, args are generated from
    layout.json (requires the sqlite3 module); otherwise they come from
    install.json.
    """
    if self.args.permutation_id is None:
        return self.profile_settings_args_install_json(ij, required)
    if 'sqlite3' not in sys.modules:
        print('The sqlite3 module needs to be build-in to Python for this feature.')
        sys.exit(1)
    return self.profile_settings_args_layout_json(required)
Return args based on install . json or layout . json params .
112
13
27,622
def profile_settings_args_install_json ( self , ij , required ) : profile_args = { } # add App specific args for p in ij . get ( 'params' ) or [ ] : # TODO: fix this required logic if p . get ( 'required' , False ) != required and required is not None : continue if p . get ( 'type' ) . lower ( ) == 'boolean' : profile_args [ p . get ( 'name' ) ] = self . _to_bool ( p . get ( 'default' , False ) ) elif p . get ( 'type' ) . lower ( ) == 'choice' : valid_values = '|' . join ( self . expand_valid_values ( p . get ( 'validValues' , [ ] ) ) ) profile_args [ p . get ( 'name' ) ] = '[{}]' . format ( valid_values ) elif p . get ( 'type' ) . lower ( ) == 'multichoice' : profile_args [ p . get ( 'name' ) ] = p . get ( 'validValues' , [ ] ) elif p . get ( 'name' ) in [ 'api_access_id' , 'api_secret_key' ] : # leave these parameters set to the value defined in defaults pass else : types = '|' . join ( p . get ( 'playbookDataType' , [ ] ) ) if types : profile_args [ p . get ( 'name' ) ] = p . get ( 'default' , '<{}>' . format ( types ) ) else : profile_args [ p . get ( 'name' ) ] = p . get ( 'default' , '' ) return profile_args
Return args based on install . json params .
381
9
27,623
def profile_settings_args_layout_json(self, required):
    """Return args based on layout.json and conditional rendering.

    Seeds the sqlite input table, generates all permutations, and then
    builds the arg values for the permutation selected with
    ``--permutation_id``.

    Args:
        required (bool): Only include params whose required flag matches.

    Returns:
        dict: Mapping of param name to value/stub for the permutation.
    """
    profile_args = {}

    self.db_create_table(self.input_table, self.install_json_params().keys())
    self.db_insert_record(self.input_table, self.install_json_params().keys())
    self.gen_permutations()
    try:
        for pn in self._input_permutations[self.args.permutation_id]:
            p = self.install_json_params().get(pn.get('name'))
            if p.get('required', False) != required:
                continue
            if p.get('type').lower() == 'boolean':
                # use the value generated in the permutation
                profile_args[p.get('name')] = pn.get('value')
            elif p.get('type').lower() == 'choice':
                # use the value generated in the permutation
                profile_args[p.get('name')] = pn.get('value')
            elif p.get('name') in ['api_access_id', 'api_secret_key']:
                # leave these parameters set to the value defined in defaults
                pass
            else:
                # add type stub for values
                types = '|'.join(p.get('playbookDataType', []))
                if types:
                    profile_args[p.get('name')] = p.get('default', '<{}>'.format(types))
                else:
                    profile_args[p.get('name')] = p.get('default', '')
    except IndexError:
        self.handle_error('Invalid permutation index provided.')
    return profile_args
Return args based on layout . json and conditional rendering .
408
11
27,624
def profile_setting_default_args(ij):
    """Build the default args for this profile.

    Values prefixed with ``$env.`` / ``$envs.`` are resolved from the
    environment at run time; playbook-only settings are appended when the
    App's runtimeLevel is Playbook.
    """
    defaults = [
        ('api_default_org', '$env.API_DEFAULT_ORG'),
        ('api_access_id', '$env.API_ACCESS_ID'),
        ('api_secret_key', '$envs.API_SECRET_KEY'),
        ('tc_api_path', '$env.TC_API_PATH'),
        ('tc_docker', False),
        ('tc_in_path', 'log'),
        ('tc_log_level', 'debug'),
        ('tc_log_path', 'log'),
        ('tc_log_to_api', False),
        ('tc_out_path', 'log'),
        ('tc_proxy_external', False),
        ('tc_proxy_host', '$env.TC_PROXY_HOST'),
        ('tc_proxy_port', '$env.TC_PROXY_PORT'),
        ('tc_proxy_password', '$envs.TC_PROXY_PASSWORD'),
        ('tc_proxy_tc', False),
        ('tc_proxy_username', '$env.TC_PROXY_USERNAME'),
        ('tc_temp_path', 'log'),
    ]
    profile_default_args = OrderedDict(defaults)
    if ij.get('runtimeLevel') == 'Playbook':
        profile_default_args['tc_playbook_db_type'] = 'Redis'
        profile_default_args['tc_playbook_db_context'] = str(uuid4())
        profile_default_args['tc_playbook_db_path'] = '$env.DB_PATH'
        profile_default_args['tc_playbook_db_port'] = '$env.DB_PORT'
        profile_default_args['tc_playbook_out_variables'] = ''
    return profile_default_args
Build the default args for this profile .
540
8
27,625
def profile_settings_validations(self):
    """Create 2 default validations rules for each output variable.

    For every playbook output variable (optionally narrowed to the
    selected permutation) adds a null-check rule and a type-check rule.

    Returns:
        dict: ``{'rules': [...], 'outputs': [...]}`` for the profile.
    """
    ij = self.load_install_json(self.args.ij)
    validations = {'rules': [], 'outputs': []}

    # a random job id makes the generated variable names unique per build
    job_id = randint(1000, 9999)
    output_variables = ij.get('playbook', {}).get('outputVariables') or []
    if self.args.permutation_id is not None:
        output_variables = self._output_permutations[self.args.permutation_id]
    # for o in ij.get('playbook', {}).get('outputVariables') or []:
    for o in output_variables:
        variable = '#App:{}:{}!{}'.format(job_id, o.get('name'), o.get('type'))
        validations['outputs'].append(variable)

        # null check
        od = OrderedDict()
        if o.get('type').endswith('Array'):
            od['data'] = [None, []]
            od['data_type'] = 'redis'
            od['operator'] = 'ni'
        else:
            od['data'] = None
            od['data_type'] = 'redis'
            od['operator'] = 'ne'
        od['variable'] = variable
        validations['rules'].append(od)

        # type check
        od = OrderedDict()
        if o.get('type').endswith('Array'):
            od['data'] = 'array'
            od['data_type'] = 'redis'
            od['operator'] = 'it'
        elif o.get('type').endswith('Binary'):
            od['data'] = 'binary'
            od['data_type'] = 'redis'
            od['operator'] = 'it'
        elif o.get('type').endswith('Entity') or o.get('type') == 'KeyValue':
            od['data'] = 'entity'
            od['data_type'] = 'redis'
            od['operator'] = 'it'
        else:
            od['data'] = 'string'
            od['data_type'] = 'redis'
            od['operator'] = 'it'
        od['variable'] = variable
        validations['rules'].append(od)
    return validations
Create 2 default validations rules for each output variable .
568
11
27,626
def profile_update(self, profile):
    """Update an existing profile with new parameters or remove deprecated parameters.

    Warns about a missing install_json value, then runs the v2 and v3 args
    migrations followed by the general schema update.
    """
    if profile.get('install_json') is None:
        # warn about missing install_json parameter
        print(
            '{}{}Missing install_json parameter for profile {}.'.format(
                c.Style.BRIGHT, c.Fore.YELLOW, profile.get('profile_name')
            )
        )

    # update args section to v2 schema
    self.profile_update_args_v2(profile)
    # update args section to v3 schema
    self.profile_update_args_v3(profile)
    # remove legacy script field
    self.profile_update_schema(profile)
Update an existing profile with new parameters or remove deprecated parameters .
135
12
27,627
def profile_update_args_v2(self, profile):
    """Update v1 profile args to v2 schema for args.

    v2 nests args under 'app' and 'default'; a v1 profile (flat args) is
    migrated by moving install.json-defined inputs under 'app' and
    everything else under 'default'.

    Args:
        profile (dict): The profile to migrate in place.
    """
    ij = self.load_install_json(profile.get('install_json', 'install.json'))
    if (
        profile.get('args', {}).get('app') is None
        and profile.get('args', {}).get('default') is None
    ):
        _args = profile.pop('args')
        profile['args'] = {}
        profile['args']['app'] = {}
        profile['args']['default'] = {}
        for arg in self.profile_settings_args_install_json(ij, None):
            try:
                profile['args']['app'][arg] = _args.pop(arg)
            except KeyError:
                # set the value to the default?
                # profile['args']['app'][arg] = self.profile_settings_args.get(arg)
                # TODO: prompt to add missing input?
                if self.args.verbose:
                    print(
                        '{}{}Input "{}" not found in profile "{}".'.format(
                            c.Style.BRIGHT, c.Fore.YELLOW, arg, profile.get('profile_name')
                        )
                    )
        # whatever was not claimed by an install.json input becomes default
        profile['args']['default'] = _args
        print(
            '{}{}Updating args section to v2 schema for profile {}.'.format(
                c.Style.BRIGHT, c.Fore.YELLOW, profile.get('profile_name')
            )
        )
Update v1 profile args to v2 schema for args .
347
12
27,628
def profile_update_args_v3(self, profile):
    """Update v1 profile args to v3 schema for args.

    v3 splits the 'app' args into 'optional' and 'required' sections based
    on each input's required flag in install.json.

    Args:
        profile (dict): The profile to migrate in place.
    """
    ij = self.load_install_json(profile.get('install_json', 'install.json'))
    ijp = self.install_json_params(ij)
    if (
        profile.get('args', {}).get('app', {}).get('optional') is None
        and profile.get('args', {}).get('app', {}).get('required') is None
    ):
        app_args = profile['args'].pop('app')
        profile['args']['app'] = {}
        profile['args']['app']['optional'] = {}
        profile['args']['app']['required'] = {}
        for arg in self.profile_settings_args_install_json(ij, None):
            required = ijp.get(arg).get('required', False)
            try:
                if required:
                    profile['args']['app']['required'][arg] = app_args.pop(arg)
                else:
                    profile['args']['app']['optional'][arg] = app_args.pop(arg)
            except KeyError:
                if self.args.verbose:
                    print(
                        '{}{}Input "{}" not found in profile "{}".'.format(
                            c.Style.BRIGHT, c.Fore.YELLOW, arg, profile.get('profile_name')
                        )
                    )
        print(
            '{}{}Updating args section to v3 schema for profile {}.'.format(
                c.Style.BRIGHT, c.Fore.YELLOW, profile.get('profile_name')
            )
        )
Update v1 profile args to v3 schema for args .
402
12
27,629
def profile_update_schema(profile):
    """Update profile to latest schema.

    Adds newer fields ("autoclear", validation "data_type") with defaults
    and drops the deprecated "script" parameter.

    Args:
        profile (dict): The profile to update in place.
    """
    # add new "autoclear" field
    if profile.get('autoclear') is None:
        print(
            '{}{}Profile Update: Adding new "autoclear" parameter.'.format(
                c.Style.BRIGHT, c.Fore.YELLOW
            )
        )
        profile['autoclear'] = True

    # add new "data_type" field
    for validation in profile.get('validations') or []:
        if validation.get('data_type') is None:
            print(
                '{}{}Profile Update: Adding new "data_type" parameter.'.format(
                    c.Style.BRIGHT, c.Fore.YELLOW
                )
            )
            validation['data_type'] = 'redis'

    # remove "script" parameter from profile
    if profile.get('install_json') is not None and profile.get('script') is not None:
        print(
            '{}{}Removing deprecated "script" parameter.'.format(
                c.Style.BRIGHT, c.Fore.YELLOW
            )
        )
        profile.pop('script')
Update profile to latest schema .
246
6
27,630
def profile_write(self, profile, outfile=None):
    """Write the profile to the output directory.

    Appends to the target JSON file when it already exists, otherwise
    creates it with the profile as its only entry.

    Args:
        profile (dict): The profile data to persist.
        outfile (str): Optional file name; defaults to the profile name
            lower-cased with spaces replaced by underscores.
    """
    # fully qualified output file
    if outfile is None:
        outfile = '{}.json'.format(
            profile.get('profile_name').replace(' ', '_').lower()
        )
    fqpn = os.path.join(self.profile_dir, outfile)

    if os.path.isfile(fqpn):
        # append
        print('Append to File: {}{}{}'.format(c.Style.BRIGHT, c.Fore.CYAN, fqpn))
        with open(fqpn, 'r+') as fh:
            try:
                data = json.load(fh, object_pairs_hook=OrderedDict)
            except ValueError as e:
                self.handle_error('Can not parse JSON data ({}).'.format(e))

            data.append(profile)
            fh.seek(0)
            fh.write(json.dumps(data, indent=2, sort_keys=True))
            fh.truncate()
    else:
        # create a new file containing just this profile
        print('Create File: {}{}{}'.format(c.Style.BRIGHT, c.Fore.CYAN, fqpn))
        with open(fqpn, 'w') as fh:
            data = [profile]
            fh.write(json.dumps(data, indent=2, sort_keys=True))
Write the profile to the output directory .
317
8
27,631
def replace_validation(self):
    """Replace the validation configuration in the selected profile.

    Reads the playbook output variables from the Redis context recorded in
    the profile, builds an equality validation rule for every non-null
    variable, and rewrites the profile file in place.
    """
    self.validate_profile_exists()
    profile_data = self.profiles.get(self.args.profile_name)

    # check redis
    # if redis is None:
    #     self.handle_error('Could not get connection to Redis')

    # load hash
    redis_hash = profile_data.get('data', {}).get('args', {}).get('tc_playbook_db_context')
    if redis_hash is None:
        self.handle_error('Could not find redis hash (db context).')

    # load data
    data = self.redis.hgetall(redis_hash)
    if data is None:
        self.handle_error('Could not load data for hash {}.'.format(redis_hash))

    validations = {'rules': [], 'outputs': []}
    # NOTE(review): the name "data" is rebound inside this loop, shadowing
    # the hgetall result being iterated — works because items() was already
    # obtained, but confirm before refactoring.
    for v, d in data.items():
        variable = v.decode('utf-8')
        # data = d.decode('utf-8')
        data = json.loads(d.decode('utf-8'))
        # if data == 'null':
        if data is None:
            continue
        validations['outputs'].append(variable)

        # null check
        od = OrderedDict()
        od['data'] = data
        od['data_type'] = 'redis'
        od['operator'] = 'eq'
        od['variable'] = variable
        # if variable.endswith('Array'):
        #     od['data'] = json.loads(data)
        #     od['data_type'] = 'redis'
        #     od['operator'] = 'eq'
        #     od['variable'] = variable
        # elif variable.endswith('Binary'):
        #     od['data'] = json.loads(data)
        #     od['data_type'] = 'redis'
        #     od['operator'] = 'eq'
        #     od['variable'] = variable
        # elif variable.endswith('String'):
        #     od['data'] = json.loads(data)
        #     od['data_type'] = 'redis'
        #     od['operator'] = 'eq'
        #     od['variable'] = variable
        validations['rules'].append(od)

    fqfn = profile_data.get('fqfn')
    with open(fqfn, 'r+') as fh:
        data = json.load(fh)
        for profile in data:
            if profile.get('profile_name') == self.args.profile_name:
                profile['validations'] = validations.get('rules')
                profile['args']['default']['tc_playbook_out_variables'] = ','.join(
                    validations.get('outputs')
                )
        fh.seek(0)
        fh.write(json.dumps(data, indent=2, sort_keys=True))
        fh.truncate()
Replace the validation configuration in the selected profile .
658
10
27,632
def validate(self, profile):
    """Check to see if any args are missing from profile.

    Prints a warning for every install.json input absent from the
    profile's app args section.
    """
    ij = self.load_install_json(profile.get('install_json'))
    print(
        '{}{}Profile: "{}".'.format(
            c.Style.BRIGHT, c.Fore.BLUE, profile.get('profile_name')
        )
    )
    app_args = profile.get('args', {}).get('app', {})
    for arg in self.profile_settings_args_install_json(ij, None):
        if app_args.get(arg) is None:
            print('{}{}Input "{}" not found.'.format(c.Style.BRIGHT, c.Fore.YELLOW, arg))
Check to see if any args are missing from profile .
155
11
27,633
def validate_layout_display(self, table, display_condition):
    """Check to see if the display condition passes.

    Runs the condition as the WHERE clause of a count query against the
    sqlite input table; a None condition always displays.

    Args:
        table (str): The sqlite table name holding current input values.
        display_condition (str): SQL fragment from layout.json, or None.

    Returns:
        bool: True when the row count for the condition is non-zero.
    """
    display = False
    if display_condition is None:
        display = True
    else:
        # NOTE(review): display_condition is interpolated directly into the
        # SQL statement; acceptable only because it comes from the App's
        # own layout.json, not untrusted input — confirm that remains true.
        display_query = 'select count(*) from {} where {}'.format(table, display_condition)
        try:
            cur = self.db_conn.cursor()
            cur.execute(display_query.replace('"', ''))
            rows = cur.fetchall()
            if rows[0][0] > 0:
                display = True
        except sqlite3.Error as e:
            print('"{}" query returned an error: ({}).'.format(display_query, e))
            sys.exit(1)
    return display
Check to see if the display condition passes .
147
9
27,634
def validate_profile_exists(self):
    """Validate the provided profiles name exists."""
    profile_name = self.args.profile_name
    if profile_name not in self.profiles:
        self.handle_error('Could not find profile "{}"'.format(profile_name))
Validate the provided profiles name exists .
51
8
27,635
def _build_indexes(self):
    """Build indexes from data for fast filtering of data.

    For each dict in ``self._data``, registers a DataObj wrapper in the
    master index (keyed by object id) and indexes every scalar field value
    so filter operations can look up matching objects without scanning.

    Raises:
        RuntimeError: If ``self._data`` is not a list, or an item is not a
            dict.
    """
    if isinstance(self._data, list):
        for d in self._data:
            if not isinstance(d, dict):
                err = u'Cannot build index for non Dict type.'
                self._tcex.log.error(err)
                raise RuntimeError(err)

            data_obj = DataObj(d)
            self._master_index.setdefault(id(data_obj), data_obj)

            for key, value in d.items():
                # bcs - update this
                # if not isinstance(value, (types.StringType, float, int)):
                # TODO: This is not Python 3 ready
                if not isinstance(value, (float, int, str)):
                    # For comparison operators the value needs to be a StringType
                    self._tcex.log.debug(u'Can only build index String Types.')
                    continue
                self._indexes.setdefault(key, {}).setdefault(value, []).append(data_obj)
    else:
        err = u'Only *List* data type is currently supported'
        self._tcex.log.error(err)
        raise RuntimeError(err)
Build indexes from data for fast filtering of data .
264
10
27,636
def _starts_with ( field , filter_value ) : valid = False if field . startswith ( filter_value ) : valid = True return valid
Validate field starts with provided value .
34
8
27,637
def filter_data(self, field, filter_value, filter_operator, field_converter=None):
    """Filter the data given the provided field/operator/value.

    Args:
        field (str): The field name to filter on.
        filter_value: The value to compare against.
        filter_operator (str): Operator key (e.g. EQ, NE, SW).
        field_converter: Optional callable applied to indexed field values
            before comparison.

    Returns:
        set: The matching data objects (empty when the field was never
        indexed).
    """
    data = []
    if self._indexes.get(field) is not None:
        data = self._index_filter(
            self._indexes.get(field), filter_value, filter_operator, field_converter
        )
    # else:
    #     data = self._loop_filter(field, filter_value, filter_operator)

    # if set_operator == "intersection":
    #     self._filtered_results.intersection(data)
    # elif set_operator == "union":
    #     self._filtered_results.union(data)
    return set(data)
Filter the data given the provided .
151
7
27,638
def operator(self):
    """Supported Filter Operators.

    Returns:
        dict: Mapping of operator key (EQ, NE, ...) to the callable that
        implements it (stdlib ``operator`` functions plus local helpers).
    """
    return {
        'EQ': operator.eq,
        'NE': operator.ne,
        'GT': operator.gt,
        'GE': operator.ge,
        'LT': operator.lt,
        'LE': operator.le,
        'SW': self._starts_with,
        'IN': self._in,
        'NI': self._ni,  # not in
    }
Supported Filter Operators
93
4
27,639
def groups(self, group_type=None, filters=None, params=None):
    """Gets all groups from a tag.

    Args:
        group_type: Optional group type used to build the TI object.
        filters: Optional filters passed through to the API request.
        params: Optional query params passed through to the API request.
    """
    group_obj = self._tcex.ti.group(group_type)
    results = self.tc_requests.groups_from_tag(
        group_obj, self.name, filters=filters, params=params
    )
    for result in results:
        yield result
Gets all groups from a tag .
70
8
27,640
def indicators(self, indicator_type=None, filters=None, params=None):
    """Gets all indicators from a tag.

    Args:
        indicator_type: Optional indicator type used to build the TI object.
        filters: Optional filters passed through to the API request.
        params: Optional query params passed through to the API request.
    """
    indicator_obj = self._tcex.ti.indicator(indicator_type)
    results = self.tc_requests.indicators_from_tag(
        indicator_obj, self.name, filters=filters, params=params
    )
    for result in results:
        yield result
Gets all indicators from a tag .
70
8
27,641
def victims(self, filters=None, params=None):
    """Gets all victims from a tag.

    Args:
        filters: Optional filters passed through to the API request.
        params: Optional query params passed through to the API request.
    """
    victim_obj = self._tcex.ti.victim(None)
    results = self.tc_requests.victims_from_tag(
        victim_obj, self.name, filters=filters, params=params
    )
    for result in results:
        yield result
Gets all victims from a tag .
62
8
27,642
def _logger ( ) : logger = logging . getLogger ( __name__ ) logger . setLevel ( logging . DEBUG ) ch = logging . StreamHandler ( sys . stdout ) ch . setLevel ( logging . DEBUG ) logger . addHandler ( ch ) return logger
Initialize basic stream logger .
58
6
27,643
def _renew_token(self, retry=True):
    """Renew an expired ThreatConnect token via the /appAuth endpoint.

    Args:
        retry (bool): When True, retry once on certain failures before
            raising; the retry call passes False to stop recursion.
    """
    self.renewing = True
    self.log.info('Renewing ThreatConnect Token')
    self.log.info('Current Token Expiration: {}'.format(self._token_expiration))
    try:
        params = {'expiredToken': self._token}
        url = '{}/appAuth'.format(self._token_url)
        r = get(url, params=params, verify=self._session.verify)

        if not r.ok or 'application/json' not in r.headers.get('content-type', ''):
            if (r.status_code == 401
                    and 'application/json' in r.headers.get('content-type', '')
                    and 'Retry token is invalid' in r.json().get('message')):
                # TODO: remove this once token renewal issue is fixed
                self.log.error('params: {}'.format(params))
                self.log.error('url: {}'.format(r.url))
                # log failure
                err_reason = r.text or r.reason
                err_msg = 'Token Retry Error. API status code: {}, API message: {}.'
                raise RuntimeError(1042, err_msg.format(r.status_code, err_reason))
            elif retry:
                warn_msg = 'Token Retry Error. API status code: {}, API message: {}.'
                self.log.warning(warn_msg.format(r.status_code, r.text))
                # delay and retry token renewal
                time.sleep(15)
                self._renew_token(False)
            else:
                err_reason = r.text or r.reason
                err_msg = 'Token Retry Error. API status code: {}, API message: {}.'
                raise RuntimeError(1042, err_msg.format(r.status_code, err_reason))

        data = r.json()
        if retry and (data.get('apiToken') is None or data.get('apiTokenExpires') is None):
            # add retry logic to handle case if the token renewal doesn't return valid data
            warn_msg = 'Token Retry Error: no values for apiToken or apiTokenExpires ({}).'
            self.log.warning(warn_msg.format(r.text))
            self._renew_token(False)
        else:
            # Renewal succeeded; store the new token and expiration.
            self._token = data.get('apiToken')
            self._token_expiration = int(data.get('apiTokenExpires'))
            self.log.info('New Token Expiration: {}'.format(self._token_expiration))
        self.renewing = False
    except exceptions.SSLError:
        self.log.error(u'SSL Error during token renewal.')
        self.renewing = False
Renew expired ThreatConnect Token .
640
7
27,644
def _api_arguments(self):
    """Define arguments for working with the ThreatConnect API."""
    # Token auth: TC main >= 4.4 passes a token to jobs.
    self.add_argument('--tc_token', default=None, help='ThreatConnect API Token')
    self.add_argument(
        '--tc_token_expires',
        default=None,
        help='ThreatConnect API Token Expiration Time',
        type=int,
    )
    # HMAC auth: TC Integrations Server or TC main < 4.4.
    self.add_argument(
        '--api_access_id', default=None, help='ThreatConnect API Access ID', required=False)
    self.add_argument(
        '--api_secret_key', default=None, help='ThreatConnect API Secret Key', required=False)
    # Toggle for validating the ThreatConnect SSL certificate.
    self.add_argument(
        '--tc_verify', action='store_true', help='Validate the ThreatConnect SSL Cert')
Argument specific to working with TC API .
210
9
27,645
def _batch_arguments(self):
    """Define arguments specific to Batch API writes."""
    self.add_argument(
        '--batch_action',
        choices=['Create', 'Delete'],
        default=self._batch_action,
        help='Action for the batch job')
    self.add_argument(
        '--batch_chunk',
        default=self._batch_chunk,
        help='Max number of indicators per batch',
        type=int)
    self.add_argument(
        '--batch_halt_on_error',
        action='store_true',
        default=self._batch_halt_on_error,
        help='Halt batch job on error')
    self.add_argument(
        '--batch_poll_interval',
        default=self._batch_poll_interval,
        help='Frequency to run status check for batch job.',
        type=int)
    self.add_argument(
        '--batch_poll_interval_max',
        default=self._batch_poll_interval_max,
        help='Maximum amount of time for status check on batch job.',
        type=int)
    self.add_argument(
        '--batch_write_type',
        choices=['Append', 'Replace'],
        default=self._batch_write_type,
        help='Append or Replace attributes.')
Arguments specific to Batch API writes .
297
9
27,646
def _playbook_arguments(self):
    """Define arguments specific to playbook apps."""
    self.add_argument(
        '--tc_playbook_db_type', default=self._tc_playbook_db_type, help='Playbook DB type')
    self.add_argument(
        '--tc_playbook_db_context',
        default=self._tc_playbook_db_context,
        help='Playbook DB Context')
    self.add_argument(
        '--tc_playbook_db_path', default=self._tc_playbook_db_path, help='Playbook DB path')
    self.add_argument(
        '--tc_playbook_db_port', default=self._tc_playbook_db_port, help='Playbook DB port')
    self.add_argument(
        '--tc_playbook_out_variables', help='Playbook output variables', required=False)
Argument specific to playbook apps .
208
7
27,647
def run ( self ) : self . batch = self . tcex . batch ( self . args . tc_owner ) # using tcex requests to get built-in features (e.g., proxy, logging, retries) request = self . tcex . request ( ) with request . session as s : r = s . get ( self . url ) if r . ok : decoded_content = r . content . decode ( 'utf-8' ) . splitlines ( ) reader = csv . reader ( decoded_content , delimiter = ',' , quotechar = '"' ) for row in reader : # CSV headers # Firstseen,MD5hash,Malware # skip comments if row [ 0 ] . startswith ( '#' ) : continue # create batch entry file_hash = self . batch . file ( row [ 1 ] , rating = '4.0' , confidence = '100' ) file_hash . tag ( row [ 2 ] ) occurrence = file_hash . occurrence ( ) occurrence . date = row [ 0 ] self . batch . save ( file_hash ) # optionally save object to disk else : self . tcex . exit ( 1 , 'Failed to download CSV data.' ) # submit batch job batch_status = self . batch . submit_all ( ) print ( batch_status ) self . exit_message = 'Downloaded data and create batch job.'
Run main App logic .
302
5
27,648
def get(self, url_path):
    """GET a url, handle errors, and return an etree.

    Args:
        url_path: Path component appended to the base JSS URL.

    Returns:
        xml.etree.ElementTree root Element parsed from the response.

    Raises:
        JSSGetError: On HTTP errors (>= 400) or unparseable XML.
    """
    request_url = "%s%s" % (self._url, quote(url_path.encode("utf_8")))
    response = self.session.get(request_url)

    if response.status_code == 200 and self.verbose:
        print "GET %s: Success." % request_url
    elif response.status_code >= 400:
        error_handler(JSSGetError, response)

    # requests GETs JSS data as XML encoded in utf-8, but
    # ElementTree.fromstring wants a string.
    jss_results = response.text.encode("utf-8")
    try:
        xmldata = ElementTree.fromstring(jss_results)
    except ElementTree.ParseError:
        raise JSSGetError("Error Parsing XML:\n%s" % jss_results)
    return xmldata
GET a url handle errors and return an etree .
198
11
27,649
def post(self, obj_class, url_path, data):
    """POST an object to the JSS. For creating new objects only.

    Args:
        obj_class: JSSObject subclass for the object being created.
        url_path: Path component appended to the base JSS URL.
        data: xml.etree.ElementTree.Element with the new object's data.

    Returns:
        The created object, fetched back from the JSS by its new ID.

    Raises:
        JSSPostError: On HTTP errors (>= 400).
    """
    # The JSS expects a post to ID 0 to create an object
    request_url = "%s%s" % (self._url, url_path)
    data = ElementTree.tostring(data)
    response = self.session.post(request_url, data=data)

    if response.status_code == 201 and self.verbose:
        print "POST %s: Success" % request_url
    elif response.status_code >= 400:
        error_handler(JSSPostError, response)

    # Get the ID of the new object. JSS returns xml encoded in utf-8
    jss_results = response.text.encode("utf-8")
    id_ = int(re.search(r"<id>([0-9]+)</id>", jss_results).group(1))

    return self.factory.get_object(obj_class, id_)
POST an object to the JSS . For creating new objects only .
210
14
27,650
def put(self, url_path, data):
    """Update an existing object on the JSS.

    Args:
        url_path: Path component appended to the base JSS URL.
        data: xml.etree.ElementTree.Element with the object's data.

    Raises:
        JSSPutError: On HTTP errors (>= 400).
    """
    request_url = "%s%s" % (self._url, url_path)
    data = ElementTree.tostring(data)
    response = self.session.put(request_url, data)

    if response.status_code == 201 and self.verbose:
        print "PUT %s: Success." % request_url
    elif response.status_code >= 400:
        error_handler(JSSPutError, response)
Update an existing object on the JSS .
103
9
27,651
def delete(self, url_path, data=None):
    """Delete an object from the JSS.

    Args:
        url_path: Path component appended to the base JSS URL.
        data: Optional request body to send with the DELETE.

    Raises:
        JSSDeleteError: On HTTP errors (>= 400).
    """
    request_url = "%s%s" % (self._url, url_path)
    if data:
        response = self.session.delete(request_url, data=data)
    else:
        response = self.session.delete(request_url)

    if response.status_code == 200 and self.verbose:
        print "DEL %s: Success." % request_url
    elif response.status_code >= 400:
        error_handler(JSSDeleteError, response)
Delete an object from the JSS .
115
8
27,652
def _docstring_parameter(obj_type, subset=False):   # pylint: disable=no-self-argument
    """Build a decorator that fills in a repetitive search-method docstring.

    The decorated object's docstring must contain a
    ``{dynamic_docstring}`` placeholder.
    """
    template = ("Flexibly search the JSS for objects of type {}.\n\n\tArgs:\n\t\t"
                "Data: Allows different types to conduct different types of "
                "searches. Argument of type:\n\t\t\tNone (or Provide no argument) "
                "to search for all objects.\n\t\t\tInt to search for an object by "
                "ID.\n\t\t\tString to search for an object by name.\n\t\t\t"
                "xml.etree.ElementTree.Element to create a new object from the "
                "Element's data.{}\n\n\tReturns:\n\t\tJSSObjectList for empty "
                "data arguments.\n\t\tReturns an object of type {} for searches "
                "and new objects.\n\t\t(FUTURE) Will return None if nothing is "
                "found that match the search criteria.\n\n\tRaises:\n\t\t"
                "JSSGetError for nonexistent objects.")
    if subset:
        subset_doc = ("\n\t\tsubset: A list of XML subelement tags to request\n"
                      "\t\t\t(e.g. ['general', 'purchasing']), OR an '&' \n\t\t\t"
                      "delimited string (e.g. 'general&purchasing').")
    else:
        subset_doc = ""

    def dec(obj):
        """Dynamically decorate a docstring."""
        class_name = str(obj_type)[:-2].rsplit(".")[-1]
        obj.__doc__ = obj.__doc__.format(
            dynamic_docstring=template.format(class_name, subset_doc, class_name))
        return obj

    return dec
Decorator for adding _docstring to repetitive methods .
450
12
27,653
def pickle_all ( self , path ) : all_search_methods = [ ( name , self . __getattribute__ ( name ) ) for name in dir ( self ) if name [ 0 ] . isupper ( ) ] # all_search_methods = [("Account", self.__getattribute__("Account")), ("Package", self.__getattribute__("Package"))] all_objects = { } for method in all_search_methods : result = method [ 1 ] ( ) if isinstance ( result , JSSFlatObject ) : all_objects [ method [ 0 ] ] = result else : try : all_objects [ method [ 0 ] ] = result . retrieve_all ( ) except JSSGetError : # A failure to get means the object type has zero # results. print method [ 0 ] , " has no results! (GETERRROR)" all_objects [ method [ 0 ] ] = [ ] # all_objects = {method[0]: method[1]().retrieve_all() # for method in all_search_methods} with open ( os . path . expanduser ( path ) , "wb" ) as pickle : cPickle . Pickler ( pickle , cPickle . HIGHEST_PROTOCOL ) . dump ( all_objects )
Back up entire JSS to a Python Pickle .
284
11
27,654
def from_pickle(cls, path):
    """Load every object from a pickle file and return them as a dict."""
    with open(os.path.expanduser(path), "rb") as pickle:
        unpickler = cPickle.Unpickler(pickle)
        return unpickler.load()
Load all objects from pickle file and return as dict .
49
12
27,655
def write_all ( self , path ) : all_search_methods = [ ( name , self . __getattribute__ ( name ) ) for name in dir ( self ) if name [ 0 ] . isupper ( ) ] # all_search_methods = [("Account", self.__getattribute__("Account")), ("Package", self.__getattribute__("Package"))] all_objects = { } for method in all_search_methods : result = method [ 1 ] ( ) if isinstance ( result , JSSFlatObject ) : all_objects [ method [ 0 ] ] = result else : try : all_objects [ method [ 0 ] ] = result . retrieve_all ( ) except JSSGetError : # A failure to get means the object type has zero # results. print method [ 0 ] , " has no results! (GETERRROR)" all_objects [ method [ 0 ] ] = [ ] # all_objects = {method[0]: method[1]().retrieve_all() # for method in all_search_methods} with open ( os . path . expanduser ( path ) , "w" ) as ofile : root = ElementTree . Element ( "JSS" ) for obj_type , objects in all_objects . items ( ) : if objects is not None : sub_element = ElementTree . SubElement ( root , obj_type ) sub_element . extend ( objects ) et = ElementTree . ElementTree ( root ) et . write ( ofile , encoding = "utf-8" )
Back up entire JSS to XML file .
336
9
27,656
def load_from_xml(self, path):
    """Load all objects from an XML backup file and return them as a dict.

    Expects the layout written by write_all(): one child element per
    object type, each containing that type's objects.
    """
    with open(os.path.expanduser(path), "r") as ifile:
        root = ElementTree.parse(ifile).getroot()

    all_objects = {}
    for child in root:
        obj_type = self.__getattribute__(child.tag)
        members = [obj_type(obj) for obj in child]
        all_objects[child.tag] = JSSObjectList(self.factory, None, members)
    return all_objects
Load all objects from XML file and return as dict .
117
11
27,657
def get_object(self, obj_class, data=None, subset=None):
    """Return a JSSObject instance by querying existing objects or
    posting a new one.

    Dispatches on the type of ``data``: None lists all objects, an int
    or string searches for one object, and an Element creates a new
    object.
    """
    if subset and not isinstance(subset, list):
        if not isinstance(subset, basestring):
            raise TypeError
        subset = subset.split("&")

    if data is None:
        return self.get_list(obj_class, data, subset)
    if isinstance(data, (basestring, int)):
        return self.get_individual_object(obj_class, data, subset)
    if isinstance(data, ElementTree.Element):
        return self.get_new_object(obj_class, data)
    raise ValueError
Return a subclassed JSSObject instance by querying for existing objects or posting a new object .
145
20
27,658
def get_list(self, obj_class, data, subset):
    """Get a list of objects as a JSSObjectList.

    Args:
        obj_class: JSSObject subclass to search for.
        data: Data used to build the request URL.
        subset: Subelement tags; only ["basic"] is honored here, and
            only for Computer searches.

    Returns:
        JSSObjectList for listable types, or a single object for types
        that can only GET.

    Raises:
        JSSMethodNotAllowedError: If obj_class supports neither listing
            nor GET.
    """
    url = obj_class.get_url(data)
    if obj_class.can_list and obj_class.can_get:
        if (subset and len(subset) == 1 and subset[0].upper() == "BASIC") and obj_class is jssobjects.Computer:
            url += "/subset/basic"

        result = self.jss.get(url)

        if obj_class.container:
            result = result.find(obj_class.container)

        return self._build_jss_object_list(result, obj_class)

    # Single object
    elif obj_class.can_get:
        xmldata = self.jss.get(url)
        return obj_class(self.jss, xmldata)
    else:
        raise JSSMethodNotAllowedError(obj_class.__class__.__name__)
Get a list of objects as JSSObjectList .
206
11
27,659
def get_individual_object(self, obj_class, data, subset):
    """Return a JSSObject of type obj_class searched for by data.

    Args:
        obj_class: JSSObject subclass to search for.
        data: ID or name to search by.
        subset: Optional list of subelement tags to request; "general"
            is always added so the object stays usable.

    Raises:
        JSSMethodNotAllowedError: If obj_class cannot GET.
    """
    if obj_class.can_get:
        url = obj_class.get_url(data)
        if subset:
            if not "general" in subset:
                subset.append("general")
            url += "/subset/%s" % "&".join(subset)

        xmldata = self.jss.get(url)

        # Some name searches may result in multiple found
        # objects. e.g. A computer search for "MacBook Pro" may
        # return ALL computers which have not had their name
        # changed.
        if xmldata.find("size") is not None:
            return self._build_jss_object_list(xmldata, obj_class)
        else:
            return obj_class(self.jss, xmldata)
    else:
        raise JSSMethodNotAllowedError(obj_class.__class__.__name__)
Return a JSSObject of type obj_class searched for by data .
207
15
27,660
def _build_jss_object_list(self, response, obj_class):
    """Convert a list-response Element into a JSSObjectList.

    Skips the JSS-managed <size> element.
    """
    entries = []
    for element in response:
        if element is None or element.tag == "size":
            continue
        entry = JSSListData(obj_class, {sub.tag: sub.text for sub in element}, self)
        entries.append(entry)
    return JSSObjectList(self, obj_class, entries)
Build a JSSListData object from response .
97
10
27,661
def convert_response_to_text(response):
    """Convert a JSS HTML error response to plaintext.

    Extracts the text of each <p> element and joins them with '. '.
    """
    # Responses are sent as html; scan line by line for <p> contents.
    paragraph = re.compile(r"<p.*>(.*)</p>")
    found = []
    for line in response.text.encode("utf-8").split("\n"):
        match = re.search(paragraph, line)
        if match:
            found.append(match.group(1))
    return ". ".join(found)
Convert a JSS HTML response to plaintext .
123
11
27,662
def error_handler(exception_cls, response):
    """Raise exception_cls with a formatted HTTP error message.

    The raised exception carries the response's status_code attribute.
    """
    # Responses are sent as html; reduce to plaintext first.
    message = convert_response_to_text(response)
    exc = exception_cls("Response Code: %s\tResponse: %s" % (response.status_code, message))
    exc.status_code = response.status_code
    raise exc
Handle HTTP errors by formatting into strings .
93
8
27,663
def loop_until_valid_response(prompt):
    """Prompt repeatedly until a yes/no answer is given; return a bool."""
    responses = {"Y": True, "YES": True, "TRUE": True,
                 "N": False, "NO": False, "FALSE": False}
    answer = ""
    while answer.upper() not in responses:
        answer = raw_input(prompt)
    return responses[answer.upper()]
Loop over entering input until it is a valid bool - ish response .
83
15
27,664
def indent_xml(elem, level=0, more_sibs=False):
    """Recursively set text/tail whitespace on elem for pretty printing.

    <data> children have their payload replaced with the placeholder
    '*DATA*' so large binary blobs aren't printed.
    """
    newline = "\n"
    pad = "    "
    if level:
        newline += (level - 1) * pad
    child_count = len(elem)
    if child_count:
        if not elem.text or not elem.text.strip():
            elem.text = newline + pad
            if level:
                elem.text += pad
        for position, child in enumerate(elem):
            if child.tag == "data":
                child.text = "*DATA*"
            indent_xml(child, level + 1, position < child_count - 1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = newline
            if more_sibs:
                elem.tail += pad
    else:
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = newline
            if more_sibs:
                elem.tail += pad
Indent an xml element object to prepare for pretty printing .
216
12
27,665
def element_repr(self):
    """Return indented XML for this element as a string."""
    # Pretty-print a deepcopy so the valid XML is never mutated.
    display = copy.deepcopy(self)
    indent_xml(display)
    return ElementTree.tostring(display).encode("utf-8")
Return a string with indented XML data .
61
9
27,666
def sort(self):
    """Sort the list in place by object ID."""
    super(JSSObjectList, self).sort(key=lambda entry: entry.id)
Sort list elements by ID .
27
6
27,667
def sort_by_name(self):
    """Sort the list in place by object name."""
    super(JSSObjectList, self).sort(key=lambda entry: entry.name)
Sort list elements by name .
31
6
27,668
def retrieve_by_id(self, id_):
    """Return the full JSSObject for the element with ID id_.

    Returns None unless exactly one element matches.
    """
    target = int(id_)
    matches = [entry for entry in self if entry.id == target]
    if len(matches) == 1:
        return matches[0].retrieve()
Return a JSSObject for the element with ID id_
61
12
27,669
def retrieve_all(self, subset=None):
    """Return a JSSObjectList of full objects for every list entry."""
    # Bind lookups locally to speed up the (potentially long) loop.
    get_object = self.factory.get_object
    obj_class = self.obj_class
    retrieved = [get_object(obj_class, entry.id, subset) for entry in self]
    return JSSObjectList(self.factory, obj_class, retrieved)
Return a list of all JSSListData elements as full JSSObjects .
92
17
27,670
def pickle(self, path):
    """Serialize this object to a python pickle file at path."""
    with open(os.path.expanduser(path), "wb") as handle:
        cPickle.Pickler(handle, cPickle.HIGHEST_PROTOCOL).dump(self)
Write objects to python pickle .
57
7
27,671
def readPlistFromString(data):
    """Read a plist from a string. Return the root object.

    Raises:
        NSPropertyListSerializationException: If data cannot be
            buffered or deserialized.
    """
    try:
        plistData = buffer(data)
    except TypeError, err:
        raise NSPropertyListSerializationException(err)
    dataObject, dummy_plistFormat, error = (NSPropertyListSerialization.propertyListFromData_mutabilityOption_format_errorDescription_(plistData, NSPropertyListMutableContainers, None, None))
    if dataObject is None:
        if error:
            error = error.encode('ascii', 'ignore')
        else:
            error = "Unknown error"
        raise NSPropertyListSerializationException(error)
    else:
        return dataObject
Read a plist data from a string . Return the root object .
134
14
27,672
def writePlist(dataObject, filepath):
    """Write dataObject as an XML plist to filepath.

    Raises:
        NSPropertyListSerializationException: If serialization fails.
        NSPropertyListWriteException: If the file write fails.
    """
    plistData, error = (
        NSPropertyListSerialization.dataFromPropertyList_format_errorDescription_(
            dataObject, NSPropertyListXMLFormat_v1_0, None))
    if plistData is None:
        raise NSPropertyListSerializationException(
            error.encode('ascii', 'ignore') if error else "Unknown error")
    if not plistData.writeToFile_atomically_(filepath, True):
        raise NSPropertyListWriteException(
            "Failed to write plist data to %s" % filepath)
Write rootObject as a plist to filepath .
145
11
27,673
def writePlistToString(rootObject):
    """Return rootObject serialized as an XML plist string.

    Raises:
        NSPropertyListSerializationException: If serialization fails.
    """
    plistData, error = (
        NSPropertyListSerialization.dataFromPropertyList_format_errorDescription_(
            rootObject, NSPropertyListXMLFormat_v1_0, None))
    if plistData is None:
        raise NSPropertyListSerializationException(
            error.encode('ascii', 'ignore') if error else "Unknown error")
    return str(plistData)
Return rootObject as a plist - formatted string .
106
11
27,674
def _new(self, name, **kwargs):
    """Populate a new JSSObject with its name element plus data_keys values.

    Args:
        name: Object name; required by the JSS.
        **kwargs: Values for keys declared in self.data_keys.
    """
    # Name is required, so set it outside of the helper func.
    if self._name_path:
        parent = self
        for path_element in self._name_path.split("/"):
            self._set_xml_from_keys(parent, (path_element, None))
            parent = parent.find(path_element)
        parent.text = name
    else:
        ElementTree.SubElement(self, "name").text = name

    for key_value in self.data_keys.items():
        self._set_xml_from_keys(self, key_value, **kwargs)
Create a new JSSObject with name and keys .
146
11
27,675
def _set_xml_from_keys(self, root, item, **kwargs):
    """Create a SubElement of root for one (key, value) pair.

    Recurses into dict values to build nested structure; leaf values
    are overridden by matching kwargs after string coercion (bools
    lowercased, None -> "", ints stringified, JSSObjects -> name).

    Args:
        root: Element to attach the new SubElement to.
        item: (key, value) tuple; value may be a nested dict.
        **kwargs: Leaf-value overrides keyed by tag name.
    """
    key, val = item
    target_key = root.find(key)
    if target_key is None:
        target_key = ElementTree.SubElement(root, key)
    if isinstance(val, dict):
        for dict_item in val.items():
            self._set_xml_from_keys(target_key, dict_item, **kwargs)
        return

    # Convert kwarg data to the appropriate string.
    if key in kwargs:
        kwarg = kwargs[key]
        if isinstance(kwarg, bool):
            kwargs[key] = str(kwargs[key]).lower()
        elif kwarg is None:
            kwargs[key] = ""
        elif isinstance(kwarg, int):
            kwargs[key] = str(kwargs[key])
        elif isinstance(kwarg, JSSObject):
            kwargs[key] = kwargs[key].name

    target_key.text = kwargs.get(key, val)
Create SubElements of root with kwargs .
253
11
27,676
def get_url(cls, data):
    """Return the URL for a GET request based on data type.

    Args:
        data: None for all objects; int (or numeric string) for an ID
            lookup; "key=value" string for a declared search type; any
            other string for the default search.

    Raises:
        JSSUnsupportedSearchMethodError: For an unrecognized search key.
        ValueError: For unsupported data types.
    """
    # Coerce numeric strings to ints so they're treated as ID lookups.
    try:
        data = int(data)
    except (ValueError, TypeError):
        pass
    if isinstance(data, int):
        return "%s%s%s" % (cls._url, cls.id_url, data)
    elif data is None:
        return cls._url
    elif isinstance(data, basestring):
        if "=" in data:
            key, value = data.split("=")   # pylint: disable=no-member
            if key in cls.search_types:
                return "%s%s%s" % (cls._url, cls.search_types[key], value)
            else:
                raise JSSUnsupportedSearchMethodError("This object cannot be queried by %s." % key)
        else:
            return "%s%s%s" % (cls._url, cls.search_types[cls.default_search], data)
    else:
        raise ValueError
Return the URL for a get request based on data type .
217
12
27,677
def url(self):
    """Return the URL path subcomponent for this object.

    Returns None when the object has no (truthy) ID yet.
    """
    if not self.id:
        return None
    return "%s%s%s" % (self._url, self.id_url, self.id)
Return the path subcomponent of the url to this object .
44
12
27,678
def delete(self, data=None):
    """DELETE this object from the JSS.

    Raises:
        JSSMethodNotAllowedError: If this type does not allow deletion.
    """
    if not self.can_delete:
        raise JSSMethodNotAllowedError(self.__class__.__name__)
    args = (self.url, data) if data else (self.url,)
    self.jss.delete(*args)
Delete this object from the JSS .
66
8
27,679
def save(self):
    """Update this object on the JSS, or create it if it is new.

    PUTs when the object appears to exist (has an ID, or cannot list);
    otherwise POSTs a new object. On success, local element data is
    replaced with the JSS-validated response.

    Raises:
        JSSPutError / JSSPostError: On failed requests.
        JSSMethodNotAllowedError: If the type supports neither PUT nor
            POST.
    """
    # Object probably exists if it has an ID (user can't assign
    # one). The only objects that don't have an ID are those that
    # cannot list.
    if self.can_put and (not self.can_list or self.id):
        # The JSS will reject PUT requests for objects that do not have
        # a category. The JSS assigns a name of "No category assigned",
        # which it will reject. Therefore, if that is the category
        # name, changed it to "", which is accepted.
        categories = [elem for elem in self.findall("category")]
        categories.extend([elem for elem in self.findall("category/name")])
        for cat_tag in categories:
            if cat_tag.text == "No category assigned":
                cat_tag.text = ""

        try:
            self.jss.put(self.url, self)
            updated_data = self.jss.get(self.url)
        except JSSPutError as put_error:
            # Something when wrong.
            raise JSSPutError(put_error)
    elif self.can_post:
        url = self.get_post_url()
        try:
            updated_data = self.jss.post(self.__class__, url, self)
        except JSSPostError as err:
            raise JSSPostError(err)
    else:
        raise JSSMethodNotAllowedError(self.__class__.__name__)

    # Replace current instance's data with new, JSS-validated data.
    self.clear()
    for child in updated_data.getchildren():
        self._children.append(child)
Update or create a new object on the JSS .
361
11
27,680
def _handle_location(self, location):
    """Resolve location (an Element or a findable path) to an Element.

    Raises:
        ValueError: If a path string matches nothing.
    """
    if isinstance(location, ElementTree.Element):
        return location
    found = self.find(location)
    if found is None:
        raise ValueError("Invalid path!")
    return found
Return an element located at location with flexible args .
53
10
27,681
def set_bool(self, location, value):
    """Set the element at location to "true" or "false".

    Accepts a bool or a string such as "TRUE"/"false".

    Raises:
        ValueError: If value is neither a bool nor a string.
    """
    element = self._handle_location(location)
    if isinstance(value, basestring):
        value = value.upper() == "TRUE"
    elif not isinstance(value, bool):
        raise ValueError
    element.text = "true" if value else "false"
Set a boolean value .
85
5
27,682
def add_object_to_path(self, obj, location):
    """Append obj (a JSSContainerObject) as list data at location.

    Returns:
        The newly added child Element.
    """
    container = self._handle_location(location)
    container.append(obj.as_list_data())
    matches = [child for child in container.getchildren()
               if child.findtext("id") == obj.id]
    return matches[0]
Add an object of type JSSContainerObject to location .
74
12
27,683
def remove_object_from_list(self, obj, list_element):
    """Remove an object from a list element.

    obj may be a JSSObject, an ID, or a name.

    Raises:
        ValueError: If more than one child matches.
    """
    container = self._handle_location(list_element)
    if isinstance(obj, JSSObject):
        matches = [child for child in container.getchildren()
                   if child.findtext("id") == obj.id]
    elif isinstance(obj, (int, basestring)):
        matches = [child for child in container.getchildren()
                   if child.findtext("id") == str(obj)
                   or child.findtext("name") == obj]

    if len(matches) > 1:
        raise ValueError("There is more than one matching object at that "
                         "path!")
    if len(matches) == 1:
        container.remove(matches[0])
Remove an object from a list element .
180
8
27,684
def from_file(cls, jss, filename):
    """Construct a JSSObject from an XML file on disk."""
    root = ElementTree.parse(filename).getroot()
    return cls(jss, root)
Create a new JSSObject from an external XML file .
40
12
27,685
def from_string(cls, jss, xml_string):
    """Construct a JSSObject from a UTF-8 XML string."""
    root = ElementTree.fromstring(xml_string.encode('utf-8'))
    return cls(jss, root)
Creates a new JSSObject from an UTF - 8 XML string .
46
15
27,686
def to_file(self, path):
    """Write this object's XML representation to path."""
    destination = os.path.expanduser(path)
    with open(destination, "w") as ofile:
        ofile.write(self.__repr__())
Write object XML to path .
45
6
27,687
def as_list_data(self):
    """Return an Element shaped for list membership: tag is
    self.list_type, with <id> and <name> children."""
    element = ElementTree.Element(self.list_type)
    for tag, text in (("id", self.id), ("name", self.name)):
        ElementTree.SubElement(element, tag).text = text
    return element
Return an Element to be used in a list .
69
10
27,688
def add_criterion(self, name, priority, and_or, search_type, value):   # pylint: disable=too-many-arguments
    """Append a SearchCriteria element to this smart group."""
    self.criteria.append(
        SearchCriteria(name, priority, and_or, search_type, value))
Add a search criteria object to a smart group .
64
10
27,689
def is_smart(self, value):
    """Set the group's is_smart flag; smart groups get a criteria element."""
    self.set_bool("is_smart", value)
    if value is True and self.find("criteria") is None:
        # pylint: disable=attribute-defined-outside-init
        self.criteria = ElementTree.SubElement(self, "criteria")
Set group is_smart property to value .
73
9
27,690
def add_device(self, device, container):
    """Add a device to a static group; wraps JSSObject.add_object_to_path.

    Raises:
        ValueError: If this is a smart group.
    """
    # Technically the JSS will strangely accept devices added to smart
    # groups (they even show up as members), but it shouldn't, so
    # disallow it here.
    if self.findtext("is_smart") != "false":
        raise ValueError("Devices may not be added to smart groups.")
    # There is a size tag which the JSS manages for us, so we can
    # ignore it.
    self.add_object_to_path(device, container)
Add a device to a group . Wraps JSSObject . add_object_to_path .
108
21
27,691
def has_member(self, device_object):
    """Return whether device_object is a member of this group.

    Args:
        device_object: An object with a ``tag`` of "computer" or
            "mobile_device" and an ``id`` attribute.

    Returns:
        bool: True if a member with a matching ID exists.

    Raises:
        ValueError: If device_object's tag is not a supported type.
    """
    if device_object.tag == "computer":
        container_search = "computers/computer"
    elif device_object.tag == "mobile_device":
        container_search = "mobile_devices/mobile_device"
    else:
        raise ValueError
    # Fix: the original used `len(...) is not 0`, an identity
    # comparison that only works due to CPython small-int caching
    # (and warns on modern Python); use a value test instead.
    return any(device.findtext("id") == device_object.id
               for device in self.findall(container_search))
Return bool whether group has a device as a member .
103
11
27,692
def copy(self, filename, id_=-1, pre_callback=None, post_callback=None):
    """Copy a package or script to every repo, with optional callbacks."""
    for repo in self._children:
        # Packages go through copy_pkg; every other file type is
        # treated as a script.
        method = repo.copy_pkg if is_package(filename) else repo.copy_script
        if pre_callback:
            pre_callback(repo.connection)
        method(filename, id_)
        if post_callback:
            post_callback(repo.connection)
Copy a package or script to all repos .
107
10
27,693
def copy_pkg(self, filename, id_=-1):
    """Copy a pkg, dmg, or zip to every repository."""
    for child in self._children:
        child.copy_pkg(filename, id_)
Copy a pkg dmg or zip to all repositories .
35
11
27,694
def copy_script(self, filename, id_=-1):
    """Copy a script to every repository."""
    for child in self._children:
        child.copy_script(filename, id_)
Copy a script to all repositories .
35
7
27,695
def delete(self, filename):
    """Delete filename from every repository that supports deletion."""
    deletable = (repo for repo in self._children if hasattr(repo, "delete"))
    for repo in deletable:
        repo.delete(filename)
Delete a file from all repositories which support it .
33
10
27,696
def umount(self, forced=True):
    """Unmount every child distribution point that supports unmounting."""
    for child in self._children:
        umount_method = getattr(child, "umount", None)
        if umount_method is not None:
            umount_method(forced)
Umount all mountable distribution points .
38
8
27,697
def exists(self, filename):
    """Report whether filename exists on every distribution point.

    Every repo is queried even after the first miss (matching the
    original behavior, in case repo.exists has side effects).
    """
    found_everywhere = True
    for repo in self._children:
        if not repo.exists(filename):
            found_everywhere = False
    return found_everywhere
Report whether a file exists on all distribution points .
33
10
27,698
def _get_user_input(prompt, key_name, parent, input_func=raw_input):
    """Prompt for a value, record it under key_name in parent, return it.

    Bool results become a bare <true/> or <false/> element; anything
    else is stored as a <string>.
    """
    val = input_func(prompt)
    ElementTree.SubElement(parent, "key").text = key_name
    if isinstance(val, bool):
        ElementTree.SubElement(parent, "true" if val else "false")
    else:
        ElementTree.SubElement(parent, "string").text = val
    return val
Prompt the user for a value and assign it to key_name .
105
15
27,699
def _handle_dist_server(ds_type, repos_array):
    """Ask whether the JSS uses a JDS/CDP; if yes, add it to repos_array.

    Raises:
        ValueError: If ds_type is not "JDS" or "CDP".
    """
    if ds_type not in ("JDS", "CDP"):
        raise ValueError("Must be JDS or CDP")
    prompt = "Does your JSS use a %s? (Y|N): " % ds_type
    if loop_until_valid_response(prompt):
        repo_dict = ElementTree.SubElement(repos_array, "dict")
        ElementTree.SubElement(repo_dict, "key").text = "type"
        ElementTree.SubElement(repo_dict, "string").text = ds_type
Ask user for whether to use a type of dist server .
172
12