idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
27,300
def stage(self):
    """Stage Redis and ThreatConnect data defined in the current profile.

    Iterates ``self.staging_data`` and dispatches each entry to the proper
    stager based on its ``data_type`` field (default: ``redis``).
    """
    for sd in self.staging_data:
        if not isinstance(sd, dict):
            # reported issue from qa where staging data is invalid
            msg = 'Invalid staging data provided ({}).'.format(sd)
            sys.exit(msg)
        data_type = sd.get('data_type', 'redis')
        if data_type == 'redis':
            self.log.debug('Stage Redis Data')
            self.stage_redis(sd.get('variable'), sd.get('data'))
        elif data_type in ['redis-array', 'redis_array']:
            self.log.debug('Stage Redis Array')
            out_variable = sd.get('variable')
            # build array
            redis_array = []
            for var in sd.get('data', {}).get('variables') or []:
                variable = var.get('value')
                # Binary/BinaryArray playbook variables need a raw read;
                # everything else is read then filtered through path_data.
                if variable.endswith('Binary'):
                    data = self.tcex.playbook.read_binary(variable, False, False)
                elif variable.endswith('BinaryArray'):
                    data = self.tcex.playbook.read_binary_array(variable, False, False)
                else:
                    data = self.path_data(self.tcex.playbook.read(variable), var.get('path'))
                # TODO: should None value be appended?
                redis_array.append(data)
            self.stage_redis(out_variable, redis_array)
            # print(redis_array)
        elif data_type == 'threatconnect':
            self.log.debug('Stage ThreatConnect Data')
            self.stage_tc(sd.get('data_owner'), sd.get('data', {}), sd.get('variable'))
        elif data_type == 'threatconnect-association':
            self.log.debug('Stage ThreatConnect Association Data')
            data = sd.get('data')
            self.stage_tc_associations(data.get('entity1'), data.get('entity2'))
        elif data_type == 'threatconnect-batch':
            self.log.debug('Stage ThreatConnect Batch Data')
            self.stage_tc_batch(sd.get('data_owner'), sd.get('data', {}))
Stage Redis and ThreatConnect data defined in profile .
553
11
27,301
def staging_data(self):
    """Read data files and return all staging data for the current profile.

    Lazily loads each JSON file listed in the profile's ``data_files``;
    missing files are reported and skipped. The result is cached on
    ``self._staging_data``.
    """
    if self._staging_data is None:
        staging_data = []
        for staging_file in self.profile.get('data_files') or []:
            if os.path.isfile(staging_file):
                print('Staging Data: {}{}{}'.format(c.Style.BRIGHT, c.Fore.MAGENTA, staging_file))
                self.log.info('[stage] Staging data file: {}'.format(staging_file))
                # fix: use a context manager so the handle is closed even
                # when json.load raises on malformed data
                with open(staging_file, 'r') as f:
                    staging_data.extend(json.load(f))
            else:
                print('{}{}Could not find file {}.'.format(c.Style.BRIGHT, c.Fore.RED, staging_file))
        self._staging_data = staging_data
    return self._staging_data
Read data files and return all staging data for current profile .
202
12
27,302
def stage_redis(self, variable, data):
    """Stage data in Redis under the given playbook variable.

    Binary/BinaryArray variables are expected as base64 encoded strings
    and are decoded before being written.
    """
    if isinstance(data, int):
        data = str(data)
    if variable.endswith('Binary'):
        try:
            data = base64.b64decode(data)
        except binascii.Error:
            msg = 'The Binary staging data for variable {} is not properly base64 encoded.'
            sys.exit(msg.format(variable))
    elif variable.endswith('BinaryArray'):
        if isinstance(data, string_types):
            data = json.loads(data)
        try:
            # decode every entry of the array
            data = [base64.b64decode(item) for item in data]
        except binascii.Error:
            msg = 'The BinaryArray staging data for variable {} is not properly base64 encoded.'
            sys.exit(msg.format(variable))
    self.log.info(u'[stage] Creating variable {}'.format(variable))
    self.tcex.playbook.create(variable, data)
Stage data in Redis .
258
6
27,303
def stage_tc(self, owner, staging_data, variable):
    """Stage data using the ThreatConnect API.

    Creates the resource described by ``staging_data`` in the given owner,
    stages the resulting entity in Redis under ``variable``, then adds any
    attributes, security labels, and tags.
    """
    # parse resource_data
    resource_type = staging_data.pop('type')
    if resource_type in self.tcex.indicator_types or resource_type in self.tcex.group_types:
        # metadata is removed from the request body and applied after creation
        try:
            attributes = staging_data.pop('attribute')
        except KeyError:
            attributes = []
        try:
            security_labels = staging_data.pop('security_label')
        except KeyError:
            security_labels = []
        try:
            tags = staging_data.pop('tag')
        except KeyError:
            tags = []
        resource = self.tcex.resource(resource_type)
        resource.http_method = 'POST'
        resource.owner = owner
        # special case for Email Group Type
        if resource_type == 'Email':
            resource.add_payload('option', 'createVictims')
        self.log.debug('body: {}'.format(staging_data))
        resource.body = json.dumps(staging_data)
        response = resource.request()
        if response.get('status') == 'Success':
            # add resource id
            if resource_type in self.tcex.indicator_types:
                resource_id = resource.summary(response.get('data'))
                self.log.info('[stage] Creating resource {}:{}'.format(resource_type, resource_id))
            elif resource_type in self.tcex.group_types:
                self.log.info(
                    '[stage] Creating resource {}:{}'.format(
                        resource_type, response.get('data', {}).get('name')
                    )
                )
                resource_id = response.get('data', {}).get('id')
            self.log.debug('[stage] resource_id: {}'.format(resource_id))
            resource.resource_id(resource_id)
            entity = self.tcex.playbook.json_to_entity(
                response.get('data'), resource.value_fields, resource.name, resource.parent
            )
            self.log.debug('[stage] Creating Entity: {} ({})'.format(variable, entity[0]))
            self.stage_redis(variable, entity[0])
            # self.tcex.playbook.create_tc_entity(variable, entity[0])
            # update metadata
            for attribute_data in attributes:
                self.stage_tc_create_attribute(
                    attribute_data.get('type'), attribute_data.get('value'), resource
                )
            for label_data in security_labels:
                self.stage_tc_create_security_label(label_data.get('name'), resource)
            for tag_data in tags:
                self.stage_tc_create_tag(tag_data.get('name'), resource)
    else:
        self.log.error('[stage] Unsupported resource type {}.'.format(resource_type))
Stage data using ThreatConnect API .
663
7
27,304
def stage_tc_create_security_label(self, label, resource):
    """Add a security label to a resource, logging a warning on failure."""
    label_resource = resource.security_labels(label)
    label_resource.http_method = 'POST'
    response = label_resource.request()
    if response.get('status') != 'Success':
        self.log.warning(
            '[tcex] Failed adding security label "{}" ({}).'.format(
                label, response.get('response').text
            )
        )
Add a security label to a resource .
109
8
27,305
def stage_tc_create_tag(self, tag, resource):
    """Add a tag to a resource, logging a warning on failure."""
    tag_resource = resource.tags(self.tcex.safetag(tag))
    tag_resource.http_method = 'POST'
    response = tag_resource.request()
    if response.get('status') != 'Success':
        self.log.warning(
            '[tcex] Failed adding tag "{}" ({}).'.format(tag, response.get('response').text)
        )
Add a tag to a resource .
114
7
27,306
def stage_tc_batch(self, owner, staging_data):
    """Stage data in the ThreatConnect Platform using the batch API.

    Adds all groups and indicators from ``staging_data`` to a batch job,
    staging a TCEntity in Redis for any entry that defines a ``variable``,
    then submits the batch and logs any errors.
    """
    batch = self.tcex.batch(owner)
    for group in staging_data.get('group') or []:
        # add to redis
        variable = group.pop('variable', None)
        path = group.pop('path', None)
        data = self.path_data(group, path)
        # update group data
        if group.get('xid') is None:
            # add xid if one doesn't exist
            group['xid'] = self.stage_tc_batch_xid(group.get('type'), group.get('name'), owner)
        # add owner name
        group['ownerName'] = owner
        # add to batch
        batch.add_group(group)
        # create tcentity
        if variable is not None and data is not None:
            self.stage_redis(variable, self.stage_tc_group_entity(data))
    for indicator in staging_data.get('indicator') or []:
        # add to redis
        variable = indicator.pop('variable', None)
        path = indicator.pop('path', None)
        if indicator.get('xid') is None:
            indicator['xid'] = self.stage_tc_batch_xid(
                indicator.get('type'), indicator.get('summary'), owner
            )
        indicator['ownerName'] = owner
        # add to batch after extra data has been popped
        batch.add_indicator(indicator)
        data = self.path_data(dict(indicator), path)
        if variable is not None and data is not None:
            # if isinstance(data, (dict)):
            #     # tcentity uses value as the name
            #     data['value'] = data.pop('summary')
            self.stage_redis(variable, self.stage_tc_indicator_entity(data))
    # submit batch
    batch_results = batch.submit()
    self.log.debug('[stage] Batch Results: {}'.format(batch_results))
    for error in batch_results.get('errors') or []:
        self.log.error('[stage] {}'.format(error))
Stage data in ThreatConnect Platform using batch API .
491
10
27,307
def stage_tc_batch_xid(xid_type, xid_value, owner):
    """Create a reproducible xid for a batch job.

    The xid is the SHA-256 hex digest of ``<type>-<value>-<owner>``.
    """
    seed = '{}-{}-{}'.format(xid_type, xid_value, owner)
    return hashlib.sha256(seed.encode('utf-8')).hexdigest()
Create an xid for a batch job .
88
9
27,308
def stage_tc_indicator_entity(self, indicator_data):
    """Convert indicator JSON data to a TCEntity via a path expression."""
    path = (
        '@.{value: summary, '
        'type: type, '
        'ownerName: ownerName, '
        'confidence: confidence || `0`, '
        'rating: rating || `0`}'
    )
    return self.path_data(indicator_data, path)
Convert JSON data to TCEntity .
82
10
27,309
def validate_log_output(self, passed, db_data, user_data, oper):
    """Format the validation log output to be easier to read.

    Long values are truncated for display, both values and the operator are
    logged, and on failure a diff is logged and a RuntimeError is raised when
    ``halt_on_fail`` is set.

    Fix: the user_data truncation loop tested ``isinstance(db_data, ...)``
    instead of the current item ``u`` (copy/paste bug), so user data items
    were truncated based on the wrong value and None items could crash.
    """
    truncate = self.args.truncate
    if db_data is not None and passed:
        if isinstance(db_data, (string_types)) and len(db_data) > truncate:
            db_data = db_data[:truncate]
        elif isinstance(db_data, (list)):
            db_data_truncated = []
            for d in db_data:
                if d is not None and isinstance(d, string_types) and len(d) > truncate:
                    db_data_truncated.append('{} ...'.format(d[:self.args.truncate]))
                else:
                    db_data_truncated.append(d)
            db_data = db_data_truncated
    if user_data is not None and passed:
        if isinstance(user_data, (string_types)) and len(user_data) > truncate:
            user_data = user_data[:self.args.truncate]
        elif isinstance(user_data, (list)):
            user_data_truncated = []
            for u in user_data:
                # fix: inspect the item being truncated, not db_data
                if u is not None and isinstance(u, string_types) and len(u) > truncate:
                    user_data_truncated.append('{} ...'.format(u[:self.args.truncate]))
                else:
                    user_data_truncated.append(u)
            user_data = user_data_truncated
    self.log.info('[validate] DB Data : ({}), Type: [{}]'.format(db_data, type(db_data)))
    self.log.info('[validate] Operator : ({})'.format(oper))
    self.log.info('[validate] User Data : ({}), Type: [{}]'.format(user_data, type(user_data)))
    if passed:
        self.log.info('[validate] Results : Passed')
    else:
        self.log.error('[validate] Results : Failed')
        if db_data is not None and user_data is not None and oper in ['eq', 'ne']:
            try:
                diff_count = 0
                for i, diff in enumerate(difflib.ndiff(db_data, user_data)):
                    if diff[0] == ' ':
                        # no difference
                        continue
                    elif diff[0] == '-':
                        self.log.info('[validate] Diff : Missing data at index {}'.format(i))
                    elif diff[0] == '+':
                        self.log.info('[validate] Diff : Extra data at index {}'.format(i))
                    if diff_count > self.max_diff:
                        # don't spam the logs if string are vastly different
                        self.log.info('Max number of differences reached.')
                        break
                    diff_count += 1
            except TypeError:
                pass
            except KeyError:
                pass
        # halt all further actions
        if self.args.halt_on_fail:
            raise RuntimeError('Failed validating data.')
Format the validation log output to be easier to read .
698
11
27,310
def _add_arg_python ( self , key , value = None , mask = False ) : self . _data [ key ] = value if not value : # both false boolean values (flags) and empty values should not be added. pass elif value is True : # true boolean values are flags and should not contain a value self . _args . append ( '--{}' . format ( key ) ) self . _args_quoted . append ( '--{}' . format ( key ) ) self . _args_masked . append ( '--{}' . format ( key ) ) else : self . _args . append ( '--{}={}' . format ( key , value ) ) if mask : # mask sensitive values value = 'x' * len ( str ( value ) ) else : # quote all values that would get displayed value = self . quote ( value ) self . _args_quoted . append ( '--{}={}' . format ( key , value ) ) self . _args_masked . append ( '--{}={}' . format ( key , value ) )
Add CLI Arg formatted specifically for Python .
241
8
27,311
def _add_arg_java ( self , key , value , mask = False ) : if isinstance ( value , bool ) : value = int ( value ) self . _data [ key ] = value self . _args . append ( '{}{}={}' . format ( '-D' , key , value ) ) self . _args_quoted . append ( self . quote ( '{}{}={}' . format ( '-D' , key , value ) ) ) if mask : value = 'x' * len ( str ( value ) ) self . _args_masked . append ( '{}{}={}' . format ( '-D' , key , value ) )
Add CLI Arg formatted specifically for Java .
151
8
27,312
def _add_arg ( self , key , value , mask = False ) : if self . lang == 'python' : self . _add_arg_python ( key , value , mask ) elif self . lang == 'java' : self . _add_arg_java ( key , value , mask )
Add CLI Arg for the correct language .
66
8
27,313
def add(self, key, value):
    """Add a CLI arg, resolving ``$env.``/``$envs.`` environment references.

    ``$envs.`` values are masked in display output.
    """
    if isinstance(value, list):
        # TODO: support env vars in list w/masked values
        for item in value:
            self._add_arg_python(key, item)
    elif isinstance(value, dict):
        err = 'Dictionary types are not currently supported for field.'
        print('{}{}{}'.format(c.Style.BRIGHT, c.Fore.RED, err))
    else:
        mask = False
        env_match = re.match(r'^\$env\.(.*)$', str(value))
        envs_match = re.match(r'^\$envs\.(.*)$', str(value))
        if env_match:
            # read value from environment variable
            value = os.environ.get(env_match.groups()[0], value)
        elif envs_match:
            # read secure value from environment variable
            value = os.environ.get(envs_match.groups()[0], value)
            mask = True
        self._add_arg(key, value, mask)
Add CLI Arg to lists value .
283
7
27,314
def quote(self, data):
    """Quote any parameter that contains spaces or special characters.

    Fix: ``quote_char`` was only assigned for the ``python`` and ``java``
    languages, raising NameError for any other ``self.lang``; it is now
    always initialized. Both supported languages use the same single-quote
    character, so behavior for them is unchanged.
    """
    # NOTE(review): python and java both quoted with "'" in the original;
    # confirm whether java should use double quotes instead.
    quote_char = "'"
    if re.findall(r'[!\-\=\s\$\&]{1,}', str(data)):
        data = '{0}{1}{0}'.format(quote_char, data)
    return data
Quote any parameters that contain spaces or special character .
93
10
27,315
def load(self, profile_args):
    """Load all provided CLI args into this instance."""
    for arg_key, arg_value in profile_args.items():
        self.add(arg_key, arg_value)
Load provided CLI Args .
31
6
27,316
def add_profile(self, profile, selected):
    """Add a profile to the report and update the summary counters."""
    report = Report(profile)
    report.selected = selected
    settings = self.report['settings']
    if selected:
        settings['selected_profiles'].append(report.name)
        settings['selected_profile_count'] += 1
    settings['total_profile_count'] += 1
    # keep the first report registered under a given name
    self.profiles.setdefault(report.name, report)
    return report
Add profile to report .
102
5
27,317
def profile(self, name):
    """Mark the named profile as selected and return it (None if missing)."""
    selected = self.profiles.get(name)
    self.selected_profile = selected
    return selected
Return a specific profile .
31
5
27,318
def add_file(self, filename, file_content):
    """Attach a file for Document and Report group types."""
    self._file_content = file_content
    self._group_data['fileName'] = filename
Add a file for Document and Report types .
38
9
27,319
def add_key_value(self, key, value):
    """Add a custom field to the Group data, normalizing known keys."""
    key = self._metadata_map.get(key, key)
    if key in ['dateAdded', 'eventDate', 'firstSeen', 'publishDate']:
        # date fields are normalized to ISO 8601 (seconds precision)
        self._group_data[key] = self._utils.format_datetime(
            value, date_format='%Y-%m-%dT%H:%M:%SZ'
        )
    elif key == 'file_content':
        # file content arg is not part of Group JSON
        pass
    else:
        self._group_data[key] = value
Add custom field to Group object .
137
7
27,320
def data(self):
    """Return the Group data including valid attributes, labels, and tags."""
    group_data = self._group_data
    # add attributes (only valid ones)
    if self._attributes:
        group_data['attribute'] = [a.data for a in self._attributes if a.valid]
    # add security labels
    if self._labels:
        group_data['securityLabel'] = [label.data for label in self._labels]
    # add tags (only valid ones)
    if self._tags:
        group_data['tag'] = [t.data for t in self._tags if t.valid]
    return group_data
Return Group data .
180
4
27,321
def security_label(self, name, description=None, color=None):
    """Return the existing SecurityLabel with this name, or add a new one."""
    label = SecurityLabel(name, description, color)
    for existing in self._labels:
        if existing.name == name:
            # a label with this name already exists; reuse it
            return existing
    self._labels.append(label)
    return label
Return instance of SecurityLabel .
69
6
27,322
def tag(self, name, formatter=None):
    """Return the existing Tag with this name, or add a new one."""
    tag = Tag(name, formatter)
    for existing in self._tags:
        if existing.name == name:
            # a tag with this name already exists; reuse it
            return existing
    self._tags.append(tag)
    return tag
Return instance of Tag .
60
5
27,323
def first_seen(self, first_seen):
    """Set the Document first seen date (normalized to ISO 8601)."""
    formatted = self._utils.format_datetime(first_seen, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._group_data['firstSeen'] = formatted
Set Document first seen .
65
5
27,324
def event_date(self, event_date):
    """Set the Event's event date (normalized to ISO 8601)."""
    formatted = self._utils.format_datetime(event_date, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._group_data['eventDate'] = formatted
Set the Events event date value .
64
7
27,325
def publish_date(self, publish_date):
    """Set the Report publish date (normalized to ISO 8601)."""
    formatted = self._utils.format_datetime(publish_date, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._group_data['publishDate'] = formatted
Set Report publish date
65
4
27,326
def find_lib_directory(self):
    """Find the optimal lib directory, preferring exact version matches.

    Checks for an exact directory match on the micro, then minor, then
    major version; falls back to the first directory containing one of
    those version strings. Returns None when nothing matches.
    """
    versions = (self.lib_micro_version, self.lib_minor_version, self.lib_major_version)
    # exact matches, most specific version first
    for candidate in versions:
        if candidate in self.lib_directories:
            return candidate
    # fuzzy fallback: first directory containing a version substring
    for candidate in versions:
        for directory in self.lib_directories:
            if candidate in directory:
                return directory
    return None
Find the optimal lib directory .
163
6
27,327
def lib_directories(self):
    """Return all readable ``lib*`` directories in the cwd, sorted descending."""
    if self._lib_directories is None:
        self._lib_directories = []
        for entry in os.listdir(os.getcwd()):
            # ensure content starts with lib, is a directory, and is readable
            if entry.startswith('lib') and os.path.isdir(entry) and os.access(entry, os.R_OK):
                self._lib_directories.append(entry)
    return sorted(self._lib_directories, reverse=True)
Get all lib directories .
134
5
27,328
def _set_unique_id ( self , json_response ) : self . unique_id = ( json_response . get ( 'md5' ) or json_response . get ( 'sha1' ) or json_response . get ( 'sha256' ) or '' )
Sets the unique_id provided a json response .
60
11
27,329
def rating(self, value):
    """Update the Indicator's rating via the API."""
    if not self.can_update():
        # NOTE(review): assumes handle_error halts/raises — confirm
        self._tcex.handle_error(910, [self.type])
    body = {'rating': value}
    return self.tc_requests.update(
        self.api_type, self.api_sub_type, self.unique_id, body, owner=self.owner
    )
Updates the Indicators rating
89
6
27,330
def add_observers(self, count, date_observed):
    """Add an Indicator observation count for a given date.

    Fix: the observation date was sent under the key ``dataObserved``;
    the parameter name and the API field are ``dateObserved``.
    """
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    data = {
        'count': count,
        'dateObserved': self._utils.format_datetime(
            date_observed, date_format='%Y-%m-%dT%H:%M:%SZ'
        ),
    }
    return self.tc_requests.add_observations(
        self.api_type, self.api_sub_type, self.unique_id, data, owner=self.owner
    )
Adds a Indicator Observation
146
6
27,331
def deleted(self, deleted_since, filters=None, params=None):
    """Return indicators deleted since the given date."""
    requests = self.tc_requests
    return requests.deleted(
        self.api_type,
        self.api_sub_type,
        deleted_since,
        owner=self.owner,
        filters=filters,
        params=params,
    )
Gets the indicators deleted .
61
6
27,332
def build_summary(val1=None, val2=None, val3=None):
    """Build a ``' : '``-joined summary from the provided values.

    Values that are None are skipped; returns None when all are None.
    """
    parts = [v for v in (val1, val2, val3) if v is not None]
    if not parts:
        return None
    return ' : '.join(parts)
Constructs the summary given va1 va2 val3
82
11
27,333
def name(self, name):
    """Update the security label name locally and via the API."""
    self._data['name'] = name
    body = self._base_request
    body['name'] = name
    return self._tc_requests.update(body, owner=self.owner)
Updates the security labels name .
54
7
27,334
def color(self, color):
    """Update the security label color locally and via the API."""
    self._data['color'] = color
    body = self._base_request
    body['color'] = color
    return self._tc_requests.update(body, owner=self.owner)
Updates the security labels color .
54
7
27,335
def description(self, description):
    """Update the security label description locally and via the API."""
    self._data['description'] = description
    body = self._base_request
    body['description'] = description
    return self._tc_requests.update(body, owner=self.owner)
Updates the security labels description .
54
7
27,336
def date_added(self, date_added):
    """Update the security label date_added (normalized to ISO 8601)."""
    formatted = self._utils.format_datetime(date_added, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._data['dateAdded'] = formatted
    body = self._base_request
    body['dateAdded'] = formatted
    return self._tc_requests.update(body, owner=self.owner)
Updates the security labels date_added
107
8
27,337
def first_seen(self, first_seen):
    """Update the campaign with the new first_seen date (ISO 8601)."""
    if not self.can_update():
        # NOTE(review): assumes handle_error halts/raises — confirm
        self._tcex.handle_error(910, [self.type])
    formatted = self._utils.format_datetime(first_seen, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._data['firstSeen'] = formatted
    body = {'firstSeen': formatted}
    return self.tc_requests.update(self.api_type, self.api_sub_type, self.unique_id, body)
Updates the campaign with the new first_seen date .
145
12
27,338
def add_filter(self, filter_key, operator, value):
    """Add a filter entry, translating the key through the metadata map."""
    mapped_key = self._metadata_map.get(filter_key, filter_key)
    entry = {'filter': mapped_key, 'operator': operator, 'value': value}
    self.filters.append(entry)
Adds a filter given a key operator and value
65
9
27,339
def recipients(self, notification_type, recipients, priority='Low'):
    """Configure a notification addressed to one or more specific recipients."""
    self._is_organization = False
    self._notification_type = notification_type
    self._priority = priority
    self._recipients = recipients
Set vars for the passed in data . Used for one or more recipient notification .
52
17
27,340
def org(self, notification_type, priority='Low'):
    """Configure an organization-wide notification (no explicit recipients)."""
    self._is_organization = True
    self._notification_type = notification_type
    self._priority = priority
    self._recipients = None
Set vars for the passed in data . Used for org notification .
50
14
27,341
def send(self, message):
    """Send the notification message via the ThreatConnect API.

    Returns the parsed JSON response on success (200) or on the known
    recipient-not-found failure (400); raises RuntimeError for any other
    status.

    Fix: removed a duplicated assignment (``resource = resource = ...``).
    """
    body = {
        'notificationType': self._notification_type,
        'priority': self._priority,
        'isOrganization': self._is_organization,
        'message': message,
    }
    if self._recipients:
        body['recipients'] = self._recipients
    self._tcex.log.debug('notification body: {}'.format(json.dumps(body)))
    # create our tcex resource
    resource = self._tcex.resource('Notification')
    resource.http_method = 'POST'
    resource.body = json.dumps(body)
    results = resource.request()  # do the request
    if results.get('response').status_code == 200:
        # everything worked
        response = results.get('response').json()
    elif results.get('response').status_code == 400:
        # failed..but known... user doesn't exist
        # just return and let calling app handle it
        err = 'Failed to send notification ({})'.format(results.get('response').text)
        self._tcex.log.error(err)
        response = results.get('response').json()
    else:
        # somekind of unknown error...raise
        err = 'Failed to send notification ({})'.format(results.get('response').text)
        self._tcex.log.error(err)
        raise RuntimeError(err)
    return response
Send our message
339
3
27,342
def assets(self, asset_type=None):
    """Retrieve assets of the given type, or all assets when type is None.

    Unknown asset types are reported via handle_error and return None.
    """
    if not self.can_update():
        # NOTE(review): assumes handle_error halts/raises — confirm
        self._tcex.handle_error(910, [self.type])
    if not asset_type:
        return self.tc_requests.adversary_assets(self.api_type, self.api_sub_type, self.unique_id)
    # map asset type to the tc_requests method name
    method_names = {
        'PHONE': 'adversary_phone_assets',
        'HANDLER': 'adversary_handle_assets',
        'URL': 'adversary_url_assets',
    }
    method_name = method_names.get(asset_type)
    if method_name is not None:
        handler = getattr(self.tc_requests, method_name)
        return handler(self.api_type, self.api_sub_type, self.unique_id)
    self._tcex.handle_error(925, ['asset_type', 'assets', 'asset_type', 'asset_type', asset_type])
    return None
Retrieves all of the assets of a given asset_type
259
13
27,343
def delete_asset(self, asset_id, asset_type):
    """Delete the asset with the provided asset_id."""
    # delegate to the generic asset handler with the DELETE action
    return self.asset(asset_id, asset_type=asset_type, action='DELETE')
Delete the asset with the provided asset_id .
42
10
27,344
def _build_command ( self , python_executable , lib_dir_fq , proxy_enabled ) : exe_command = [ os . path . expanduser ( python_executable ) , '-m' , 'pip' , 'install' , '-r' , self . requirements_file , '--ignore-installed' , '--quiet' , '--target' , lib_dir_fq , ] if self . args . no_cache_dir : exe_command . append ( '--no-cache-dir' ) if proxy_enabled : # trust the pypi hosts to avoid ssl errors trusted_hosts = [ 'pypi.org' , 'pypi.python.org' , 'files.pythonhosted.org' ] for host in trusted_hosts : exe_command . append ( '--trusted-host' ) exe_command . append ( host ) return exe_command
Build the pip command for installing dependencies .
209
8
27,345
def _configure_proxy(self):
    """Configure proxy environment variables from CLI args.

    Returns True when a proxy is (or already was) configured.

    Fix: ``os.putenv`` does not update ``os.environ``, so the settings were
    invisible to the current process (and to the guard at the top of this
    method on a second call); assigning to ``os.environ`` updates both the
    process environment and child processes.
    """
    if os.getenv('HTTP_PROXY') or os.getenv('HTTPS_PROXY'):
        # TODO: is this appropriate?
        # don't change proxy settings if the OS already has them configured.
        return True
    proxy_enabled = False
    if self.args.proxy_host is not None and self.args.proxy_port is not None:
        if self.args.proxy_user is not None and self.args.proxy_pass is not None:
            # percent-encode credentials for use in the URL
            proxy_user = quote(self.args.proxy_user, safe='~')
            proxy_pass = quote(self.args.proxy_pass, safe='~')
            # proxy url with auth
            proxy_url = '{}:{}@{}:{}'.format(
                proxy_user, proxy_pass, self.args.proxy_host, self.args.proxy_port
            )
        else:
            # proxy url without auth
            proxy_url = '{}:{}'.format(self.args.proxy_host, self.args.proxy_port)
        os.environ['HTTP_PROXY'] = 'http://{}'.format(proxy_url)
        os.environ['HTTPS_PROXY'] = 'https://{}'.format(proxy_url)
        print(
            'Using Proxy Server: {}{}:{}.'.format(
                c.Fore.CYAN, self.args.proxy_host, self.args.proxy_port
            )
        )
        proxy_enabled = True
    return proxy_enabled
Configure proxy settings using environment variables .
338
8
27,346
def _create_temp_requirements ( self ) : self . use_temp_requirements_file = True # Replace tcex version with develop branch of tcex with open ( self . requirements_file , 'r' ) as fh : current_requirements = fh . read ( ) . strip ( ) . split ( '\n' ) self . requirements_file = 'temp-{}' . format ( self . requirements_file ) with open ( self . requirements_file , 'w' ) as fh : new_requirements = '' for line in current_requirements : if not line : continue if line . startswith ( 'tcex' ) : line = 'git+https://github.com/ThreatConnect-Inc/tcex.git@{}#egg=tcex' line = line . format ( self . args . branch ) # print('line', line) new_requirements += '{}\n' . format ( line ) fh . write ( new_requirements )
Create a temporary requirements . txt .
224
8
27,347
def _gen_indicator_class(self):
    """Generate and register a class for each Custom Indicator type."""
    for entry in self.tcex.indicator_types_data.values():
        name = entry.get('name')
        class_name = name.replace(' ', '')
        # temp fix for API issue where boolean are returned as strings
        entry['custom'] = self.tcex.utils.to_bool(entry.get('custom'))
        if class_name in globals():
            # skip Indicator Type if a class already exists
            continue
        # Custom Indicator can have 3 values. Only add the value if it is set.
        value_fields = [
            entry[label]
            for label in ('value1Label', 'value2Label', 'value3Label')
            if entry.get(label)
        ]
        class_data = {}
        # Add Class for each Custom Indicator type to this module
        custom_class = custom_indicator_class_factory(name, Indicator, class_data, value_fields)
        setattr(module, class_name, custom_class)
        # Add Custom Indicator Method
        self._gen_indicator_method(name, custom_class, len(value_fields))
Generate Custom Indicator Classes .
315
7
27,348
def _group ( self , group_data ) : if isinstance ( group_data , dict ) : # get xid from dict xid = group_data . get ( 'xid' ) else : # get xid from object xid = group_data . xid if self . groups . get ( xid ) is not None : # return existing group from memory group_data = self . groups . get ( xid ) elif self . groups_shelf . get ( xid ) is not None : # return existing group from shelf group_data = self . groups_shelf . get ( xid ) else : # store new group self . groups [ xid ] = group_data return group_data
Return previously stored group or new group .
153
8
27,349
def _indicator ( self , indicator_data ) : if isinstance ( indicator_data , dict ) : # get xid from dict xid = indicator_data . get ( 'xid' ) else : # get xid from object xid = indicator_data . xid if self . indicators . get ( xid ) is not None : # return existing indicator from memory indicator_data = self . indicators . get ( xid ) elif self . indicators_shelf . get ( xid ) is not None : # return existing indicator from shelf indicator_data = self . indicators_shelf . get ( xid ) else : # store new indicators self . indicators [ xid ] = indicator_data return indicator_data
Return previously stored indicator or new indicator .
154
8
27,350
def add_indicator(self, indicator_data):
    """Add an indicator to the Batch Job, normalizing custom fields for batch v2."""
    standard_types = ['Address', 'EmailAddress', 'File', 'Host', 'URL']
    if indicator_data.get('type') not in standard_types:
        # for custom indicator types the valueX fields are required;
        # build them from the summary
        for index, value in enumerate(self._indicator_values(indicator_data.get('summary')), 1):
            indicator_data['value{}'.format(index)] = value
    if indicator_data.get('type') == 'File':
        # convert custom field name to the appropriate value for batch v2
        size = indicator_data.pop('size', None)
        if size is not None:
            indicator_data['intValue1'] = size
    if indicator_data.get('type') == 'Host':
        # convert custom field name to the appropriate value for batch v2
        dns_active = indicator_data.pop('dnsActive', None)
        if dns_active is not None:
            indicator_data['flag1'] = dns_active
        whois_active = indicator_data.pop('whoisActive', None)
        if whois_active is not None:
            indicator_data['flag2'] = whois_active
    return self._indicator(indicator_data)
Add an indicator to Batch Job .
301
8
27,351
def address(self, ip, **kwargs):
    """Add Address data to the Batch object."""
    return self._indicator(Address(ip, **kwargs))
Add Address data to Batch object .
39
8
27,352
def adversary(self, name, **kwargs):
    """Add Adversary data to the Batch object."""
    return self._group(Adversary(name, **kwargs))
Add Adversary data to Batch object .
40
10
27,353
def asn(self, as_number, **kwargs):
    """Add ASN data to the Batch object."""
    return self._indicator(ASN(as_number, **kwargs))
Add ASN data to Batch object .
45
9
27,354
def campaign(self, name, **kwargs):
    """Add Campaign data to the Batch object."""
    return self._group(Campaign(name, **kwargs))
Add Campaign data to Batch object .
38
8
27,355
def cidr(self, block, **kwargs):
    """Add CIDR data to the Batch object."""
    return self._indicator(CIDR(block, **kwargs))
Add CIDR data to Batch object .
43
10
27,356
def close(self):
    """Cleanup batch job shelf files.

    In debug mode (with saved files enabled) the uploaded xids are written to
    ``xids-saved`` so a subsequent run can skip previously submitted data;
    otherwise both shelf files are removed.
    """
    self.groups_shelf.close()
    self.indicators_shelf.close()
    if self.debug and self.enable_saved_file:
        fqfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved')
        if os.path.isfile(fqfn):
            os.remove(fqfn)  # remove previous file to prevent duplicates
        with open(fqfn, 'w') as fh:
            for xid in self.saved_xids:
                fh.write('{}\n'.format(xid))
    else:
        # delete saved shelf files
        if os.path.isfile(self.group_shelf_fqfn):
            os.remove(self.group_shelf_fqfn)
        # bug fix: the original re-checked group_shelf_fqfn here, which left
        # the indicator shelf file behind on disk
        if os.path.isfile(self.indicator_shelf_fqfn):
            os.remove(self.indicator_shelf_fqfn)
Cleanup batch job .
220
5
27,357
def data(self):
    """Return the batch data to be sent to the ThreatConnect API.

    Groups are drained first (in-memory dict, then shelf), followed by
    indicators; processing stops early once ``_batch_max_chunk`` entities
    have been collected so each API request stays under the chunk limit.
    """
    entity_count = 0
    data = {'group': [], 'indicator': []}
    # process group data
    group_data, entity_count = self.data_groups(self.groups, entity_count)
    data['group'].extend(group_data)
    if entity_count >= self._batch_max_chunk:
        return data
    group_data, entity_count = self.data_groups(self.groups_shelf, entity_count)
    data['group'].extend(group_data)
    if entity_count >= self._batch_max_chunk:
        return data
    # process indicator data
    indicator_data, entity_count = self.data_indicators(self.indicators, entity_count)
    data['indicator'].extend(indicator_data)
    if entity_count >= self._batch_max_chunk:
        return data
    indicator_data, entity_count = self.data_indicators(self.indicators_shelf, entity_count)
    data['indicator'].extend(indicator_data)
    if entity_count >= self._batch_max_chunk:
        return data
    return data
Return the batch data to be sent to the ThreatConnect API .
257
13
27,358
def data_group_association(self, xid):
    """Return group dict array following all associations.

    The group entry for *xid* is removed from its store as a side effect, so
    each group is emitted at most once across the batch.

    Args:
        xid (str): The xid of the group to collect.

    Returns:
        list: Group dicts for *xid* and, recursively, its associations.
    """
    groups = []
    group_data = None
    # get group data from one of the arrays
    if self.groups.get(xid) is not None:
        group_data = self.groups.get(xid)
        del self.groups[xid]
    elif self.groups_shelf.get(xid) is not None:
        group_data = self.groups_shelf.get(xid)
        del self.groups_shelf[xid]
    if group_data is not None:
        # convert any obj into dict and process file data
        group_data = self.data_group_type(group_data)
        groups.append(group_data)
        # recursively get associations
        for assoc_xid in group_data.get('associatedGroupXid', []):
            groups.extend(self.data_group_association(assoc_xid))
    return groups
Return group dict array following all associations .
206
8
27,359
def data_group_type(self, group_data):
    """Return dict representation of group data.

    File content for Document/Report groups is pulled out into
    ``self._files`` so it can be uploaded separately from the batch JSON.

    Args:
        group_data: A group dict or a group object exposing ``data``
            (and ``file_data`` for file-bearing types).

    Returns:
        dict: The group data with any ``fileContent`` removed.
    """
    if isinstance(group_data, dict):
        # process file content
        file_content = group_data.pop('fileContent', None)
        if file_content is not None:
            self._files[group_data.get('xid')] = {
                'fileContent': file_content,
                'type': group_data.get('type'),
            }
    else:
        GROUPS_STRINGS_WITH_FILE_CONTENTS = ['Document', 'Report']
        # process file content
        if group_data.data.get('type') in GROUPS_STRINGS_WITH_FILE_CONTENTS:
            self._files[group_data.data.get('xid')] = group_data.file_data
        group_data = group_data.data
    return group_data
Return dict representation of group data .
197
7
27,360
def data_groups(self, groups, entity_count):
    """Process Group data for batch submission.

    Args:
        groups: Mapping of xid -> group data (in-memory dict or shelf).
        entity_count (int): Running count of entities collected so far.

    Returns:
        tuple: (list of group dicts, updated entity_count).
    """
    data = []
    # snapshot the keys: data_group_association() deletes entries from the
    # mapping while we iterate, which would otherwise raise
    # "RuntimeError: dictionary changed size during iteration"
    for xid in list(groups.keys()):
        # get association from group data
        assoc_group_data = self.data_group_association(xid)
        data += assoc_group_data
        entity_count += len(assoc_group_data)
        if entity_count >= self._batch_max_chunk:
            break
    return data, entity_count
Process Group data .
100
4
27,361
def data_indicators(self, indicators, entity_count):
    """Process Indicator data for batch submission.

    Args:
        indicators: Mapping of xid -> indicator data (dict or object with
            a ``data`` attribute).
        entity_count (int): Running count of entities collected so far.

    Returns:
        tuple: (list of indicator dicts, updated entity_count).
    """
    data = []
    # snapshot the items: entries are deleted from the mapping inside the
    # loop, which would otherwise raise
    # "RuntimeError: dictionary changed size during iteration"
    for xid, indicator_data in list(indicators.items()):
        entity_count += 1
        if isinstance(indicator_data, dict):
            data.append(indicator_data)
        else:
            data.append(indicator_data.data)
        del indicators[xid]
        if entity_count >= self._batch_max_chunk:
            break
    return data, entity_count
Process Indicator data .
101
5
27,362
def debug(self):
    """Return True when a DEBUG marker file exists in the TC temp path."""
    marker = os.path.join(self.tcex.args.tc_temp_path, 'DEBUG')
    return os.path.isfile(marker)
Return debug setting
48
3
27,363
def document(self, name, file_name, **kwargs):
    """Add Document data to Batch object.

    Args:
        name (str): The name of the Document.
        file_name (str): The name of the attached file.
        **kwargs: Additional Document fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Document(name, file_name, **kwargs))
Add Document data to Batch object .
46
8
27,364
def email(self, name, subject, header, body, **kwargs):
    """Add Email data to Batch object.

    Args:
        name (str): The name of the Email group.
        subject (str): The email subject.
        header (str): The email header.
        body (str): The email body.
        **kwargs: Additional Email fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Email(name, subject, header, body, **kwargs))
Add Email data to Batch object .
50
8
27,365
def errors(self, batch_id, halt_on_error=True):
    """Retrieve Batch errors from the ThreatConnect API.

    Args:
        batch_id: The ID of the batch job whose errors are requested.
        halt_on_error (bool): Passed to the error handler to control halting.

    Returns:
        list: Error entries decoded from the API response (empty when the
        response is not ok; None if the request itself raised and the
        handler did not halt).
    """
    errors = []
    try:
        r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id))
        # if r.status_code == 404:
        #     time.sleep(5)  # allow time for errors to be processed
        #     r = self.tcex.session.get('/v2/batch/{}/errors'.format(batch_id))
        self.tcex.log.debug(
            'Retrieve Errors for ID {}: status code {}, errors {}'.format(
                batch_id, r.status_code, r.text
            )
        )
        # self.tcex.log.debug('Retrieve Errors URL {}'.format(r.url))
        # API does not return correct content type
        if r.ok:
            errors = json.loads(r.text)
        # temporarily process errors to find "critical" errors.
        # FR in core to return error codes.
        for error in errors:
            error_reason = error.get('errorReason')
            for error_msg in self._critical_failures:
                if re.findall(error_msg, error_reason):
                    self.tcex.handle_error(10500, [error_reason], halt_on_error)
        return errors
    except Exception as e:
        self.tcex.handle_error(560, [e], halt_on_error)
Retrieve Batch errors to ThreatConnect API .
319
10
27,366
def event(self, name, **kwargs):
    """Add Event data to Batch object.

    Args:
        name (str): The name of the Event.
        **kwargs: Additional Event fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Event(name, **kwargs))
Add Event data to Batch object .
38
8
27,367
def file(self, md5=None, sha1=None, sha256=None, **kwargs):
    """Add File data to Batch object.

    Args:
        md5 (str, optional): The MD5 hash.
        sha1 (str, optional): The SHA1 hash.
        sha256 (str, optional): The SHA256 hash.
        **kwargs: Additional File fields.

    Returns:
        The object returned by ``self._indicator``.
    """
    return self._indicator(File(md5, sha1, sha256, **kwargs))
Add File data to Batch object .
63
8
27,368
def generate_xid(identifier=None):
    """Generate an xid from the provided identifier(s).

    Args:
        identifier: None for a random uuid4-based xid, a string used
            directly, or a list of values joined with '-' and pre-hashed
            before the final digest.

    Returns:
        str: A sha256 hex digest.
    """
    if identifier is None:
        identifier = str(uuid.uuid4())
    elif isinstance(identifier, list):
        identifier = '-'.join([str(i) for i in identifier])
        # pre-hash the joined list (scoped to the list branch; the flattened
        # source applied this to every input, double-hashing plain strings)
        identifier = hashlib.sha256(identifier.encode('utf-8')).hexdigest()
    return hashlib.sha256(identifier.encode('utf-8')).hexdigest()
Generate xid from provided identifiers .
108
8
27,369
def group(self, group_type, name, **kwargs):
    """Add Group data to Batch object.

    Args:
        group_type (str): The ThreatConnect group type.
        name (str): The name of the group.
        **kwargs: Additional Group fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Group(group_type, name, **kwargs))
Add Group data to Batch object .
46
8
27,370
def group_shelf_fqfn(self):
    """Return groups shelf fully qualified filename.

    Reuses the ``groups-saved`` file when one exists from a previous run;
    otherwise a new uuid-suffixed shelf file name is generated.
    """
    if self._group_shelf_fqfn is None:
        if self.saved_groups:
            # saved shelf file carried over from a previous run
            shelf_name = 'groups-saved'
        else:
            # new shelf file for this run
            shelf_name = 'groups-{}'.format(str(uuid.uuid4()))
        self._group_shelf_fqfn = os.path.join(self.tcex.args.tc_temp_path, shelf_name)
    return self._group_shelf_fqfn
Return groups shelf fully qualified filename .
148
7
27,371
def groups_shelf(self):
    """Return dictionary of all Groups data (lazily opened shelve)."""
    if self._groups_shelf is not None:
        return self._groups_shelf
    # open on first access; writeback disabled so writes persist immediately
    self._groups_shelf = shelve.open(self.group_shelf_fqfn, writeback=False)
    return self._groups_shelf
Return dictionary of all Groups data .
57
7
27,372
def incident(self, name, **kwargs):
    """Add Incident data to Batch object.

    Args:
        name (str): The name of the Incident.
        **kwargs: Additional Incident fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Incident(name, **kwargs))
Add Incident data to Batch object .
38
8
27,373
def indicator(self, indicator_type, summary, **kwargs):
    """Add Indicator data to Batch object.

    Args:
        indicator_type (str): The ThreatConnect indicator type.
        summary (str): The indicator summary value.
        **kwargs: Additional Indicator fields.

    Returns:
        The object returned by ``self._indicator``.
    """
    return self._indicator(Indicator(indicator_type, summary, **kwargs))
Add Indicator data to Batch object .
48
9
27,374
def indicator_shelf_fqfn(self):
    """Return indicator shelf fully qualified filename.

    Reuses the ``indicators-saved`` file when one exists from a previous
    run; otherwise a new uuid-suffixed shelf file name is generated.
    """
    if self._indicator_shelf_fqfn is None:
        if self.saved_indicators:
            # saved shelf file carried over from a previous run
            shelf_name = 'indicators-saved'
        else:
            # new shelf file for this run
            shelf_name = 'indicators-{}'.format(str(uuid.uuid4()))
        self._indicator_shelf_fqfn = os.path.join(self.tcex.args.tc_temp_path, shelf_name)
    return self._indicator_shelf_fqfn
Return indicator shelf fully qualified filename .
155
7
27,375
def indicators_shelf(self):
    """Return dictionary of all Indicator data (lazily opened shelve)."""
    if self._indicators_shelf is not None:
        return self._indicators_shelf
    # open on first access; writeback disabled so writes persist immediately
    self._indicators_shelf = shelve.open(self.indicator_shelf_fqfn, writeback=False)
    return self._indicators_shelf
Return dictionary of all Indicator data .
60
8
27,376
def intrusion_set(self, name, **kwargs):
    """Add Intrusion Set data to Batch object.

    Args:
        name (str): The name of the Intrusion Set.
        **kwargs: Additional Intrusion Set fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(IntrusionSet(name, **kwargs))
Add Intrusion Set data to Batch object .
43
11
27,377
def mutex(self, mutex, **kwargs):
    """Add Mutex data to Batch object.

    Args:
        mutex (str): The mutex value for the indicator.
        **kwargs: Additional Mutex fields.

    Returns:
        The object returned by ``self._indicator``.
    """
    return self._indicator(Mutex(mutex, **kwargs))
Add Mutex data to Batch object .
43
9
27,378
def registry_key(self, key_name, value_name, value_type, **kwargs):
    """Add Registry Key data to Batch object.

    Args:
        key_name (str): The registry key name.
        value_name (str): The registry value name.
        value_type (str): The registry value type.
        **kwargs: Additional Registry Key fields.

    Returns:
        The object returned by ``self._indicator``.
    """
    return self._indicator(RegistryKey(key_name, value_name, value_type, **kwargs))
Add Registry Key data to Batch object .
62
9
27,379
def report(self, name, **kwargs):
    """Add Report data to Batch object.

    Args:
        name (str): The name of the Report.
        **kwargs: Additional Report fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Report(name, **kwargs))
Add Report data to Batch object .
38
8
27,380
def save ( self , resource ) : resource_type = None xid = None if isinstance ( resource , dict ) : resource_type = resource . get ( 'type' ) xid = resource . get ( 'xid' ) else : resource_type = resource . type xid = resource . xid if resource_type is not None and xid is not None : saved = True if resource_type in self . tcex . group_types : try : # groups self . groups_shelf [ xid ] = resource except Exception : saved = False if saved : try : del self . _groups [ xid ] except KeyError : # if group was saved twice it would already be delete pass elif resource_type in self . tcex . indicator_types_data . keys ( ) : try : # indicators self . indicators_shelf [ xid ] = resource except Exception : saved = False if saved : try : del self . _indicators [ xid ] except KeyError : # if indicator was saved twice it would already be delete pass
Save group|indicator dict or object to shelve .
225
12
27,381
def saved_groups(self):
    """Return True if a readable saved groups file exists, else False."""
    if self._saved_groups is None:
        fqfn_saved = os.path.join(self.tcex.args.tc_temp_path, 'groups-saved')
        self._saved_groups = bool(
            self.enable_saved_file
            and os.path.isfile(fqfn_saved)
            and os.access(fqfn_saved, os.R_OK)
        )
        if self._saved_groups:
            self.tcex.log.debug('groups-saved file found')
    return self._saved_groups
Return True if saved group files exist, else False.
143
10
27,382
def saved_xids(self):
    """Return previously saved xids (debug mode only, cached)."""
    if self._saved_xids is None:
        xids = []
        if self.debug:
            fpfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved')
            if os.path.isfile(fpfn) and os.access(fpfn, os.R_OK):
                with open(fpfn) as fh:
                    xids = fh.read().splitlines()
        self._saved_xids = xids
    return self._saved_xids
Return previously saved xids .
137
6
27,383
def settings(self):
    """Return batch job settings for the API request."""
    payload = {
        'action': self._action,
        # attributeWriteType other than Replace is not supported in v2 batch
        'attributeWriteType': 'Replace',
        'haltOnError': str(self._halt_on_error).lower(),
        'owner': self._owner,
        'version': 'V2',
    }
    # optional settings are only included when explicitly configured
    if self._playbook_triggers_enabled is not None:
        payload['playbookTriggersEnabled'] = str(self._playbook_triggers_enabled).lower()
    if self._hash_collision_mode is not None:
        payload['hashCollisionMode'] = self._hash_collision_mode
    if self._file_merge_mode is not None:
        payload['fileMergeMode'] = self._file_merge_mode
    return payload
Return batch job settings .
212
5
27,384
def signature(self, name, file_name, file_type, file_text, **kwargs):
    """Add Signature data to Batch object.

    Args:
        name (str): The name of the Signature.
        file_name (str): The signature file name.
        file_type (str): The signature type.
        file_text (str): The signature content.
        **kwargs: Additional Signature fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Signature(name, file_name, file_type, file_text, **kwargs))
Add Signature data to Batch object .
62
8
27,385
def submit_files(self, halt_on_error=True):
    """Submit Files for Documents and Reports to ThreatConnect API.

    Args:
        halt_on_error (bool): Halt on any file-upload error; overridden by
            the global ``halt_on_file_error`` setting when that is set.

    Returns:
        list: dicts of ``{'uploaded': bool, 'xid': str}`` per file processed.
    """
    # check global setting for override
    if self.halt_on_file_error is not None:
        halt_on_error = self.halt_on_file_error

    upload_status = []
    # snapshot the items: entries are deleted from self._files inside the
    # loop, which would raise RuntimeError on a live dict view
    for xid, content_data in list(self._files.items()):
        del self._files[xid]  # win or lose remove the entry
        status = True

        # used for debug/testing to prevent upload of previously uploaded file
        if self.debug and xid in self.saved_xids:
            self.tcex.log.debug('skipping previously saved file {}.'.format(xid))
            continue

        # process the file content (may be a callable resolved lazily by xid)
        content = content_data.get('fileContent')
        if callable(content):
            content = content_data.get('fileContent')(xid)
        if content is None:
            upload_status.append({'uploaded': False, 'xid': xid})
            self.tcex.log.warning('File content was null for xid {}.'.format(xid))
            continue
        # only Document and Report groups carry file content (see data_group_type)
        if content_data.get('type') == 'Document':
            api_branch = 'documents'
        elif content_data.get('type') == 'Report':
            api_branch = 'reports'

        # Post File
        url = '/v2/groups/{}/{}/upload'.format(api_branch, xid)
        headers = {'Content-Type': 'application/octet-stream'}
        params = {'owner': self._owner}
        r = self.submit_file_content('POST', url, content, headers, params, halt_on_error)
        if r.status_code == 401:
            # use PUT method if file already exists
            self.tcex.log.info('Received 401 status code using POST. Trying PUT to update.')
            r = self.submit_file_content('PUT', url, content, headers, params, halt_on_error)
        self.tcex.log.debug('{} Upload URL: {}.'.format(content_data.get('type'), r.url))
        if not r.ok:
            status = False
            self.tcex.handle_error(585, [r.status_code, r.text], halt_on_error)
        elif self.debug:
            self.saved_xids.append(xid)
        self.tcex.log.info('Status {} for file upload with xid {}.'.format(r.status_code, xid))
        upload_status.append({'uploaded': status, 'xid': xid})
    return upload_status
Submit Files for Documents and Reports to ThreatConnect API .
611
11
27,386
def submit_file_content(self, method, url, data, headers, params, halt_on_error=True):
    """Submit File Content for Documents and Reports to ThreatConnect API.

    Args:
        method (str): HTTP method ('POST' or 'PUT').
        url (str): The upload URL.
        data: The raw file content.
        headers (dict): Request headers.
        params (dict): Request query parameters.
        halt_on_error (bool): Passed to the error handler on failure.

    Returns:
        The response object, or None if the request raised.
    """
    try:
        return self.tcex.session.request(method, url, data=data, headers=headers, params=params)
    except Exception as e:
        self.tcex.handle_error(580, [e], halt_on_error)
    return None
Submit File Content for Documents and Reports to ThreatConnect API .
90
12
27,387
def threat(self, name, **kwargs):
    """Add Threat data to Batch object.

    Args:
        name (str): The name of the Threat.
        **kwargs: Additional Threat fields.

    Returns:
        The object returned by ``self._group``.
    """
    return self._group(Threat(name, **kwargs))
Add Threat data to Batch object
38
7
27,388
def user_agent(self, text, **kwargs):
    """Add User Agent data to Batch object.

    Args:
        text (str): The user agent string for the indicator.
        **kwargs: Additional User Agent fields.

    Returns:
        The object returned by ``self._indicator``.
    """
    return self._indicator(UserAgent(text, **kwargs))
Add User Agent data to Batch object
42
8
27,389
def url(self, text, **kwargs):
    """Add URL Address data to Batch object.

    Args:
        text (str): The URL value for the indicator.
        **kwargs: Additional URL fields.

    Returns:
        The object returned by ``self._indicator``.
    """
    return self._indicator(URL(text, **kwargs))
Add URL Address data to Batch object .
39
9
27,390
def write_batch_json(self, content):
    """Write batch json data to a timestamped file in the TC temp path.

    Args:
        content: JSON-serializable batch data.
    """
    timestamp = str(time.time()).replace('.', '')
    filename = 'batch-{}.json'.format(timestamp)
    fqfn = os.path.join(self.tcex.args.tc_temp_path, filename)
    with open(fqfn, 'w') as fh:
        json.dump(content, fh, indent=2)
Write batch json data to a file .
99
8
27,391
def attribute_labels(self, attribute_id, params=None):
    """Yield the security labels attached to an attribute.

    Args:
        attribute_id: The attribute whose labels are requested.
        params (dict, optional): Query parameters for the request.

    Yields:
        Security label entries returned by tc_requests.
    """
    if params is None:
        params = {}
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    for al in self.tc_requests.attribute_labels(
        self.api_type,
        self.api_sub_type,
        self.unique_id,
        attribute_id,
        owner=self.owner,
        params=params,
    ):
        yield al
Gets the security labels from an attribute.
109
8
27,392
def attribute_label(self, attribute_id, label, action='GET', params=None):
    """Get or delete a single security label on an attribute.

    Args:
        attribute_id: The attribute to operate on.
        label: The security label name.
        action (str): 'GET' or 'DELETE'; any other value raises via the
            error handler (925).
        params (dict, optional): Query parameters for GET.

    Returns:
        The tc_requests response for GET/DELETE, or None for an unknown
        action when the handler does not halt.
    """
    if params is None:
        params = {}
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    if action == 'GET':
        return self.tc_requests.get_attribute_label(
            self.api_type,
            self.api_sub_type,
            self.unique_id,
            attribute_id,
            label,
            owner=self.owner,
            params=params,
        )
    if action == 'DELETE':
        return self.tc_requests.delete_attribute_label(
            self.api_type,
            self.api_sub_type,
            self.unique_id,
            attribute_id,
            label,
            owner=self.owner,
        )
    self._tcex.handle_error(925, ['action', 'attribute_label', 'action', 'action', action])
    return None
Gets a security label from an attribute.
215
8
27,393
def add_attribute_label(self, attribute_id, label):
    """Add a security label to an attribute.

    Args:
        attribute_id: The attribute to label.
        label: The security label name.

    Returns:
        The tc_requests response.
    """
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    return self.tc_requests.add_attribute_label(
        self.api_type,
        self.api_sub_type,
        self.unique_id,
        attribute_id,
        label,
        owner=self.owner,
    )
Adds a security label to an attribute.
92
7
27,394
def bundle(self, bundle_name):
    """Bundle multiple Job or Playbook Apps into a single zip file.

    Args:
        bundle_name (str): Default bundle name, used when tcex.json does not
            define named ``bundle_packages``.
    """
    if self.args.bundle or self.tcex_json.get('package', {}).get('bundle', False):
        if self.tcex_json.get('package', {}).get('bundle_packages') is not None:
            # named bundles: each entry selects apps by regex patterns
            for bundle in self.tcex_json.get('package', {}).get('bundle_packages') or []:
                bundle_name = bundle.get('name')
                bundle_patterns = bundle.get('patterns')
                bundle_apps = []
                for app in self._app_packages:
                    for app_pattern in bundle_patterns:
                        p = re.compile(app_pattern, re.IGNORECASE)
                        if p.match(app):
                            bundle_apps.append(app)
                # bundle app in zip
                if bundle_apps:
                    self.bundle_apps(bundle_name, bundle_apps)
        else:
            # single bundle containing every built app package
            self.bundle_apps(bundle_name, self._app_packages)
Bundle multiple Job or Playbook Apps into a single zip file .
239
14
27,395
def commit_hash(self):
    """Return the current git commit hash if available, else None.

    Reads ``.git/HEAD`` (e.g. ``ref: refs/heads/develop``) relative to the
    current working directory to find the branch, then reads the hash from
    the matching ref file.
    """
    branch = None
    head_file = '.git/HEAD'
    if os.path.isfile(head_file):
        with open(head_file, 'r') as fh:
            try:
                branch = fh.read().strip().split('/')[2]
            except IndexError:
                pass
    if not branch:
        return None
    ref_file = '.git/refs/heads/{}'.format(branch)
    if not os.path.isfile(ref_file):
        return None
    with open(ref_file, 'r') as fh:
        return fh.read().strip()
Return the current commit hash if available .
163
8
27,396
def print_results(self):
    """Print results of the package command.

    Emits the Updates, Package, and Bundle sections from ``package_data``;
    list outputs are wrapped five items per line. Unless validation errors
    are ignored, errors are printed last and the exit code is set to 1.
    """
    # Updates
    if self.package_data.get('updates'):
        print('\n{}{}Updates:'.format(c.Style.BRIGHT, c.Fore.BLUE))
        for p in self.package_data['updates']:
            print(
                '{!s:<20}{}{} {!s:<50}'.format(
                    p.get('action'), c.Style.BRIGHT, c.Fore.CYAN, p.get('output')
                )
            )
    # Packaging
    print('\n{}{}Package:'.format(c.Style.BRIGHT, c.Fore.BLUE))
    for p in self.package_data['package']:
        if isinstance(p.get('output'), list):
            n = 5  # items per printed line
            list_data = p.get('output')
            print(
                '{!s:<20}{}{} {!s:<50}'.format(
                    p.get('action'), c.Style.BRIGHT, c.Fore.CYAN, ', '.join(p.get('output')[:n])
                )
            )
            del list_data[:n]
            for data in [
                list_data[i : i + n] for i in range(0, len(list_data), n)  # noqa: E203
            ]:
                print(
                    '{!s:<20}{}{} {!s:<50}'.format(
                        '', c.Style.BRIGHT, c.Fore.CYAN, ', '.join(data)
                    )
                )
        else:
            print(
                '{!s:<20}{}{} {!s:<50}'.format(
                    p.get('action'), c.Style.BRIGHT, c.Fore.CYAN, p.get('output')
                )
            )
    # Bundle
    if self.package_data.get('bundle'):
        print('\n{}{}Bundle:'.format(c.Style.BRIGHT, c.Fore.BLUE))
        for p in self.package_data['bundle']:
            print(
                '{!s:<20}{}{} {!s:<50}'.format(
                    p.get('action'), c.Style.BRIGHT, c.Fore.CYAN, p.get('output')
                )
            )
    # ignore exit code
    if not self.args.ignore_validation:
        print('\n')  # separate errors from normal output
        # print all errors
        for error in self.package_data.get('errors'):
            print('{}{}'.format(c.Fore.RED, error))
            self.exit_code = 1
Print results of the package command .
610
7
27,397
def zip_file(self, app_path, app_name, tmp_path):
    """Zip the App with tcex extension.

    Args:
        app_path (str): Root path for the output directory.
        app_name (str): Name of the app (and archive root directory).
        tmp_path (str): Directory containing the staged app files.
    """
    base = os.path.join(app_path, self.args.outdir, app_name)
    tcx_file = '{}.tcx'.format(base)
    # build the zip archive, then rename it with the .tcx extension
    shutil.make_archive(base, 'zip', tmp_path, app_name)
    shutil.move('{}.zip'.format(base), tcx_file)
    self._app_packages.append(tcx_file)
    # update package data
    self.package_data['package'].append({'action': 'App Package:', 'output': tcx_file})
Zip the App with tcex extension .
183
9
27,398
def content_type(self, data):
    """The Content-Type header value for this request.

    Stores the value and mirrors it into the request headers.
    """
    value = str(data)
    self._content_type = value
    self.add_header('Content-Type', value)
The Content - Type header value for this request .
38
10
27,399
def set_basic_auth(self, username, password):
    """Manually set basic auth in the header when normal method does not work.

    Args:
        username (str): The basic-auth username.
        password (str): The basic-auth password.
    """
    token = '{}:{}'.format(username, password).encode('utf-8')
    credentials = str(b64encode(token), 'utf-8')
    self.authorization = 'Basic {}'.format(credentials)
Manually set basic auth in the header when normal method does not work .
66
15