idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
224,800
def all_solidity_variables_used_as_args(self):
    """Solidity variables used directly as call arguments (computed once, cached)."""
    cached = self._all_solidity_variables_used_as_args
    if cached is None:
        explorer = lambda func: self._explore_func_nodes(
            func, self._solidity_variable_in_internal_calls)
        cached = self._explore_functions(explorer)
        self._all_solidity_variables_used_as_args = cached
    return cached
Return the Solidity variables directly used in a call
113
11
224,801
def is_protected(self):
    """Determine whether the function is protected by a check on msg.sender."""
    if self.is_constructor:
        # A constructor runs only once, at deployment: treat as protected.
        return True
    read_in_conditions = self.all_conditional_solidity_variables_read(include_loop=False)
    used_as_args = self.all_solidity_variables_used_as_args()
    return SolidityVariableComposed('msg.sender') in read_in_conditions + used_as_args
Determine if the function is protected using a check on msg . sender
90
15
224,802
def auth_string(self):
    """Build a ``Token <base64(username:token)>`` authorization header value.

    Returns:
        str: the header value, e.g. ``Token dXNlcjpzZWNyZXQ=``.
    """
    username_token = '{username}:{token}'.format(username=self.username, token=self.token)
    # b64encode() operates on bytes: encode before and decode after so the
    # final header value is a plain str. The original passed a str, which
    # raises TypeError on Python 3.
    b64encoded_string = b64encode(username_token.encode('utf-8')).decode('ascii')
    return 'Token {b64}'.format(b64=b64encoded_string)
Authenticate based on username and token which is base64 - encoded
79
13
224,803
def api_related(self, query):
    """Find objects related to *query* through the SoltraEdge API."""
    url = "{0}/{1}/related/?format=json".format(self.base_url, query)
    response = requests.get(url, headers=self.headers, verify=self.verify_ssl)
    if response.status_code != 200:
        # Surface the server's own diagnostics to the analyzer error channel.
        self.error('Received status code: {0} from Soltra Server. Content:\n{1}'.format(
            response.status_code, response.text))
    else:
        return response.json()
Find related objects through SoltraEdge API
115
8
224,804
def tlp_classifiers(self, name_tlp, val_tlp):
    """Map Soltra's name-based TLP against Cortex's numeric TLP.

    Returns True when the named TLP rank does not exceed ``val_tlp``.
    Raises KeyError for unknown TLP names (as before).
    """
    tlp_ranks = {"WHITE": 0, "GREEN": 1, "AMBER": 2, "RED": 3}
    return tlp_ranks[name_tlp] <= val_tlp
Classifier between Cortex and Soltra . Soltra uses name - TLP and Cortex value - TLP
73
21
224,805
def pop_object(self, element):
    """Blank out an element that carried a TLP value higher than allowed.

    All displayable fields are cleared; title/description fields are
    replaced with an explanatory redaction notice. Returns the mutated
    element.
    """
    notice = "Redacted. Object contained TLP value higher than allowed."
    element['id'] = ''
    element['url'] = ''
    element['type'] = ''
    element['tags'] = []
    element['etlp'] = None
    element['title'] = notice
    # Self-assignment kept from the original: the colour is preserved, and a
    # missing 'tlpColor' key still raises KeyError the same way.
    element['tlpColor'] = element['tlpColor']
    element['uploaded_on'] = ''
    element['uploaded_by'] = ''
    element['description'] = notice
    element['children_types'] = []
    element['summary']['type'] = ''
    element['summary']['value'] = ''
    element['summary']['title'] = notice
    element['summary']['description'] = notice
    return element
Pop the object element if the object contains a higher TLP than allowed .
197
15
224,806
def __query(domain, limit=100):
    """Query pdns.cert.at through the bundled whois.sh wrapper script.

    Calling whois via subprocess directly raises errors, so the shell
    script is used as a workaround (ugly but works, per the author).
    """
    script = os.path.join(os.path.dirname(__file__), 'whois.sh')
    # NOTE(review): limit and domain are deliberately passed as ONE argv
    # element ('--limit <n> <domain>'); whois.sh appears to re-split its
    # arguments. Kept as-is — confirm before changing.
    s = check_output(['{}'.format(script),
                      '--limit {} {}'.format(limit, domain)],
                     universal_newlines=True)
    return s
Using the shell script to query pdns . cert . at is a hack but python raises an error every time using subprocess functions to call whois . So this hack is avoiding calling whois directly . Ugly but works .
76
47
224,807
def analyze_vba(self, path):
    """Analyze a given sample for malicious VBA using olevba."""
    try:
        parser = VBA_Parser_CLI(path, relaxed=True)
        report = parser.process_file_json(
            show_decoded_strings=True,
            display_code=True,
            hide_attributes=False,
            vba_code_only=False,
            show_deobfuscated_code=True,
            deobfuscate=True,
        )
        self.add_result_subsection('Olevba', report)
    except TypeError:
        # olevba signals unsupported file types via TypeError.
        self.add_result_subsection(
            'Oletools VBA Analysis failed',
            'Analysis failed due to an filetype error.'
            'The file does not seem to be a valid MS-Office '
            'file.')
Analyze a given sample for malicious vba .
172
10
224,808
def get(self, ip_address):
    """Return the MaxMind DB record for *ip_address*, or None if absent.

    Raises:
        ValueError: when an IPv6 address is looked up in an IPv4-only DB.
    """
    address = ipaddress.ip_address(ip_address)
    if address.version == 6 and self._metadata.ip_version == 4:
        raise ValueError('Error looking up {0}. You attempted to look up '
                         'an IPv6 address in an IPv4-only database.'.format(ip_address))
    pointer = self._find_address_in_tree(address)
    if not pointer:
        return None
    return self._resolve_data_pointer(pointer)
Return the record for the ip_address in the MaxMind DB
111
13
224,809
def search(self, domain, wildcard=True):
    """Search crt.sh for certificates matching *domain*; None on failure."""
    if wildcard:
        domain = "%25.{}".format(domain)  # %25 is a URL-encoded '%'
    url = "https://crt.sh/?q={}&output=json".format(domain)
    ua = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1'
    req = requests.get(url, headers={'User-Agent': ua})
    if req.ok:
        try:
            content = req.content.decode('utf-8')
            # crt.sh used to return concatenated JSON objects; patch them
            # into a proper JSON array before parsing.
            return json.loads(content.replace('}{', '},{'))
        except Exception:
            self.error("Error retrieving information.")
    return None
Search crt . sh for the given domain .
179
10
224,810
def __search(self, value, type_attribute):
    """Search every configured MISP connection for *value*.

    Raises:
        EmptySearchtermError: when *value* is empty.
    """
    if not value:
        raise EmptySearchtermError
    results = []
    for idx, connection in enumerate(self.misp_connections):
        misp_response = connection.search(type_attribute=type_attribute, values=value)
        # Fixes #94: misp_name may be one name or a list aligned with connections.
        name = self.misp_name[idx] if isinstance(self.misp_name, list) else self.misp_name
        results.append({'url': connection.root_url,
                        'name': name,
                        'result': self.__clean(misp_response)})
    return results
Search method call wrapper .
144
5
224,811
def search_tor_node(self, data_type, data):
    """Check whether an artifact (ip/fqdn/domain) is a known Tor exit node."""
    handlers = {'ip': '_get_node_from_ip',
                'fqdn': '_get_node_from_fqdn',
                'domain': '_get_node_from_domain'}
    try:
        lookup = getattr(self, handlers[data_type])
    except KeyError:
        # Unsupported data types yield an empty node list, as before.
        results = []
    else:
        results = lookup(data)
    return {"nodes": results}
Lookup an artifact to check if it is a known tor exit node .
110
15
224,812
def check(self, file):
    """Check *file* against every loaded YARA rule; return match strings."""
    return [str(match)
            for rule in self.ruleset
            for match in rule.match(file)]
Checks a given file against all available yara rules
44
11
224,813
def query(self, domain):
    """Query passive DNS for *domain*; make datetimes JSON-serializable.

    ``time_first``/``time_last`` datetime objects are replaced by their
    isoformat strings so the result can be serialized.
    """
    result = {}
    try:
        result = self.pdns.query(domain)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; self.error() reports the failure.
        self.error('Exception while querying passiveDNS. Check the domain format.')
    # Clean the datetime problems in order to correct the json serializability.
    clean_result = []
    for resultset in result:  # unused enumerate() index removed
        if resultset.get('time_first', None):
            resultset['time_first'] = resultset.get('time_first').isoformat(' ')
        if resultset.get('time_last', None):
            resultset['time_last'] = resultset.get('time_last').isoformat(' ')
        clean_result.append(resultset)
    return clean_result
The actual query happens here . Time from queries is replaced with isoformat .
179
15
224,814
def query_ip(self, ip):
    """Query CIRCL Passive SSL for *ip* via PyPSSL and reshape the result.

    Returns:
        dict with 'ip' and a list of {'fingerprint', 'subject'} entries.
    """
    try:
        result = self.pssl.query(ip)
    except Exception:
        # Narrowed from a bare ``except:``; self.error() reports and exits.
        self.error('Exception during processing with passiveSSL. '
                   'Please check the format of ip.')
    # Check for empty result.
    # result is always assigned: self.error exits the function.
    if not result.get(ip, None):
        certificates = []
    else:
        certificates = list(result.get(ip).get('certificates'))
    newresult = {'ip': ip, 'certificates': []}
    for cert in certificates:
        newresult['certificates'].append({
            'fingerprint': cert,
            'subject': result.get(ip).get('subjects').get(cert).get('values')[0],
        })
    return newresult
Queries Circl . lu Passive SSL for an ip using PyPSSL class . Returns error if nothing is found .
184
25
224,815
def query_certificate(self, cert_hash):
    """Query CIRCL Passive SSL for a certificate hash.

    Returns a dict with the query result and the fetched certificate
    (empty dict when fetch_cert finds nothing).
    """
    try:
        cquery = self.pssl.query_cert(cert_hash)
    except Exception:
        self.error('Exception during processing with passiveSSL. '
                   'This happens if the given hash is not sha1 or contains dashes/colons etc. '
                   'Please make sure to submit a clean formatted sha1 hash.')
    # fetch_cert raises an error if no certificate was found.
    try:
        cfetch = self.pssl.fetch_cert(cert_hash, make_datetime=False)
    except Exception:
        cfetch = {}
    return {'query': cquery, 'cert': cfetch}
Queries Circl . lu Passive SSL for a certificate hash using PyPSSL class . Returns error if nothing is found .
146
26
224,816
def _get_level(current_level, new_intention):
    """Map a GreyNoise intention to a Cortex level, keeping the worse one.

    Args:
        current_level: existing Cortex level (or None when unset).
        new_intention: GreyNoise intention string; unknown values map to 'info'.

    Returns:
        The more malicious of the two levels.
    """
    intention_level_map = OrderedDict([
        ('info', 'info'),
        ('benign', 'safe'),
        ('suspicious', 'suspicious'),
        ('malicious', 'malicious'),
    ])
    # dict views have no .index() on Python 3 — materialize a list first.
    levels = list(intention_level_map.values())
    new_level = intention_level_map.get(new_intention, 'info')
    new_index = levels.index(new_level)
    try:
        current_index = levels.index(current_level)
    except ValueError:
        # There is no existing level.
        current_index = -1
    return new_level if new_index > current_index else current_level
Map GreyNoise intentions to Cortex maliciousness levels . Accept a Cortex level and a GreyNoise intention , then return the more malicious of the two .
162
30
224,817
def summary(self, raw):
    """Return one taxonomy summarizing the reported GreyNoise tags.

    A single tag becomes the predicate (with its count as value); multiple
    tags collapse into an 'entries' predicate. The most malicious level
    seen across records is used.
    """
    try:
        taxonomies = []
        if raw.get('records'):
            final_level = None
            taxonomy_data = defaultdict(int)
            for record in raw.get('records', []):
                name = record.get('name', 'unknown')
                intention = record.get('intention', 'unknown')
                taxonomy_data[name] += 1
                final_level = self._get_level(final_level, intention)
            if len(taxonomy_data) > 1:
                # Multiple tags have been found.
                taxonomies.append(self.build_taxonomy(
                    final_level, 'GreyNoise', 'entries', len(taxonomy_data)))
            else:
                # There is only one tag found, possibly multiple times.
                # .iteritems() was Python 2 only; .items() works on both.
                for name, count in taxonomy_data.items():
                    taxonomies.append(self.build_taxonomy(
                        final_level, 'GreyNoise', name, count))
        else:
            taxonomies.append(self.build_taxonomy('info', 'GreyNoise', 'Records', 'None'))
        return {"taxonomies": taxonomies}
    except Exception as e:
        # BaseException.message was removed in Python 3; format the exception.
        self.error('Summary failed\n{}'.format(e))
Return one taxonomy summarizing the reported tags If there is only one tag use it as the predicate If there are multiple tags use entries as the predicate Use the total count as the value Use the most malicious level found
294
43
224,818
def scan_file(self, this_file):
    """Submit a file to be scanned by VirusTotal.

    *this_file* may be a path string, a StringIO instance, or a file object.
    """
    params = {'apikey': self.api_key}
    try:
        if type(this_file) == str and os.path.isfile(this_file):
            # NOTE(review): the handle opened here is never explicitly
            # closed; it is released after the request when garbage-collected.
            files = {'file': (this_file, open(this_file, 'rb'))}
        elif isinstance(this_file, StringIO.StringIO):
            # Python 2 idiom (StringIO module); io.StringIO on Python 3.
            files = {'file': this_file.read()}
        else:
            files = {'file': this_file}
    except TypeError as e:
        return dict(error=e.message)  # e.message: Python 2 only
    try:
        response = requests.post(self.base + 'file/scan', files=files,
                                 params=params, proxies=self.proxies)
    except requests.RequestException as e:
        return dict(error=e.message)
    return _return_response_and_status_code(response)
Submit a file to be scanned by VirusTotal
197
9
224,819
def scan_url(self, this_url):
    """Submit a URL to be scanned by VirusTotal."""
    params = {'apikey': self.api_key, 'url': this_url}
    try:
        response = requests.post(self.base + 'url/scan',
                                 params=params, proxies=self.proxies)
    except requests.RequestException as e:
        return dict(error=e.message)  # e.message: Python 2 only
    return _return_response_and_status_code(response)
Submit a URL to be scanned by VirusTotal .
94
10
224,820
def get_file(self, this_hash):
    """Download a file by its hash; returns raw bytes or an error dict."""
    params = {'apikey': self.api_key, 'hash': this_hash}
    try:
        response = requests.get(self.base + 'file/download',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        return dict(error=e.message)  # e.message: Python 2 only
    status = response.status_code
    if status == requests.codes.ok:
        return response.content
    if status == 403:
        return dict(error='You tried to perform calls to functions for which you require a Private API key.',
                    response_code=status)
    if status == 404:
        return dict(error='File not found.', response_code=status)
    return dict(response_code=status)
Download a file by its hash .
186
7
224,821
def get_url_report(self, this_url, scan='0', allinfo=1):
    """Get the scan results for a URL (optionally queueing a rescan)."""
    params = {'apikey': self.api_key, 'resource': this_url,
              'scan': scan, 'allinfo': allinfo}
    try:
        response = requests.get(self.base + 'url/report',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        return dict(error=e.message)  # e.message: Python 2 only
    return _return_response_and_status_code(response)
Get the scan results for a URL .
121
8
224,822
def get_comments(self, resource, before=None):
    """Get VirusTotal comments for a file or URL resource."""
    params = dict(apikey=self.api_key, resource=resource, before=before)
    try:
        response = requests.get(self.base + 'comments/get',
                                params=params, proxies=self.proxies)
    except requests.RequestException as e:
        return dict(error=e.message)  # e.message: Python 2 only
    return _return_response_and_status_code(response)
Get comments for a file or URL .
95
8
224,823
def save_downloaded_file(filename, save_file_at, file_stream):
    """Write *file_stream* (bytes) to *save_file_at*/*filename*."""
    destination = os.path.join(save_file_at, filename)
    with open(destination, 'wb') as f:
        f.write(file_stream)
        f.flush()  # redundant before close; kept from the original
Save Downloaded File to Disk Helper Function
62
9
224,824
def name(self):
    """Return the first localized name matching self._locales, or None."""
    # pylint:disable=E1101
    for locale in self._locales:
        if locale in self.names:
            return self.names.get(locale)
    return None
Dict with locale codes as keys and localized name as value
46
12
224,825
def summary(self, raw):
    """Parse a Patrowl getreport payload into Grade/Findings taxonomies."""
    taxonomies = []
    namespace = "Patrowl"
    # getreport service
    if self.service == 'getreport' and 'risk_level' in raw and raw['risk_level']:
        risk_level = raw['risk_level']
        # Grade: A/B count as safe, anything else as suspicious.
        grade_level = "safe" if risk_level['grade'] in ["A", "B"] else "suspicious"
        taxonomies.append(self.build_taxonomy(grade_level, namespace, "Grade",
                                              risk_level['grade']))
        # Findings severity drives the level.
        if risk_level['high'] > 0:
            findings_level = "malicious"
        elif risk_level['medium'] > 0 or risk_level['low'] > 0:
            findings_level = "suspicious"
        else:
            findings_level = "info"
        taxonomies.append(self.build_taxonomy(
            findings_level, namespace, "Findings",
            "{}/{}/{}/{}".format(risk_level['high'], risk_level['medium'],
                                 risk_level['low'], risk_level['info'])))
    # todo: add_asset service
    return {"taxonomies": taxonomies}
Parse format and return scan summary .
289
8
224,826
def run(self):
    """Run the analyzer: fetch the asset report from the Patrowl API."""
    try:
        if self.service != 'getreport':
            self.error('Unknown Patrowl service')
        else:
            service_url = '{}/assets/api/v1/details/{}'.format(self.url, self.get_data())
            headers = {'Authorization': 'token {}'.format(self.api_key)}
            response = requests.get(service_url, headers=headers)
            self.report(response.json())
    except Exception as e:
        self.unexpectedError(e)
Run the analyzer .
125
5
224,827
def run(self):
    """Query Backscatter.io observations or enrichment for the observable."""
    kwargs = {'query': self.get_data()}
    query_types = {'ip': 'ip',
                   'network': 'network',
                   'autonomous-system': 'asn',
                   'port': 'port'}
    if self.data_type not in query_types:
        self.notSupported()
        return False
    kwargs['query_type'] = query_types[self.data_type]
    if self.service == 'observations':
        self.report(self.bs.get_observations(**kwargs))
    elif self.service == 'enrichment':
        self.report(self.bs.enrich(**kwargs))
    else:
        self.report({'error': 'Invalid service defined.'})
Run the process to get observation data from Backscatter . io .
243
14
224,828
def summary(self, raw):
    """Build a taxonomy view from Backscatter.io summary data."""
    taxonomies = list()
    level = 'info'
    namespace = 'Backscatter.io'
    if self.service == 'observations':
        summary = raw.get('results', dict()).get('summary', dict())
        # One count taxonomy per summary metric, defaulting to 0.
        for label, key in (('Observations', 'observations_count'),
                           ('IP Addresses', 'ip_address_count'),
                           ('Networks', 'network_count'),
                           ('AS', 'autonomous_system_count'),
                           ('Ports', 'port_count'),
                           ('Protocols', 'protocol_count')):
            taxonomies.append(self.build_taxonomy(level, namespace, label,
                                                  summary.get(key, 0)))
    elif self.service == 'enrichment':
        summary = raw.get('results', dict())
        if self.data_type == 'ip':
            for label, key in (('Network', 'network'),
                               ('Network Broadcast', 'network_broadcast'),
                               ('Network Size', 'network_size'),
                               ('Country', 'country_name'),
                               ('AS Number', 'as_num'),
                               ('AS Name', 'as_name')):
                taxonomies.append(self.build_taxonomy(level, namespace, label,
                                                      summary.get(key)))
        elif self.data_type == 'network':
            taxonomies.append(self.build_taxonomy(level, namespace, 'Network Size',
                                                  summary.get('network_size')))
        elif self.data_type == 'autonomous-system':
            for label, key in (('Prefix Count', 'prefix_count'),
                               ('AS Number', 'as_num'),
                               ('AS Name', 'as_name')):
                taxonomies.append(self.build_taxonomy(level, namespace, label,
                                                      summary.get(key)))
        elif self.data_type == 'port':
            # One taxonomy entry per observed service.
            for result in raw.get('results', list()):
                display = "%s (%s)" % (result.get('service'), result.get('protocol'))
                taxonomies.append(self.build_taxonomy(level, namespace, 'Service', display))
    return {"taxonomies": taxonomies}
Use the Backscatter . io summary data to create a view .
732
14
224,829
def decode(self, offset):
    """Decode the data-section value starting at *offset*.

    Dispatches to the per-type decoder, which returns (value, new_offset).
    """
    new_offset = offset + 1
    (ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset])
    type_num = ctrl_byte >> 5
    if not type_num:
        # Extended type: the real type number follows the control byte.
        type_num, new_offset = self._read_extended(new_offset)
    size, new_offset = self._size_from_ctrl_byte(ctrl_byte, new_offset, type_num)
    return self._type_decoder[type_num](self, size, new_offset)
Decode a section of the data section starting at offset
141
11
224,830
def get_sample(self, samplehash):
    """Download information about a sample by MD5/SHA1/SHA256 hash.

    Raises:
        UnknownHashTypeError: for hashes of unexpected length.
        BadResponseError: when VMRay answers anything but HTTP 200.
    """
    # Hash type is inferred from its hex-digest length.
    prefix_by_length = {32: 'md5/', 40: 'sha1/', 64: 'sha256/'}
    prefix = prefix_by_length.get(len(samplehash))
    if prefix is None:
        raise UnknownHashTypeError('Sample hash has an unknown length.')
    apiurl = '/rest/sample/' + prefix
    res = self.session.get(self.url + apiurl + samplehash)
    if res.status_code == 200:
        return json.loads(res.text)
    raise BadResponseError('Response from VMRay was not HTTP 200.'
                           ' Responsecode: {}; Text: {}'.format(res.status_code, res.text))
Downloads information about a sample using a given hash .
179
11
224,831
def submit_sample(self, filepath, filename, tags=['TheHive']):
    """Upload a new sample to the VMRay API; filename is sent base64-encoded.

    NOTE(review): the mutable default for *tags* is kept for interface
    compatibility (it is only read, never mutated, here).

    Raises:
        SampleFileNotFoundError: when *filepath* does not exist.
        BadResponseError: when VMRay answers anything but HTTP 200.
    """
    apiurl = '/rest/sample/submit?sample_file'
    params = {'sample_filename_b64enc': base64.b64encode(filename.encode('utf-8')),
              'reanalyze': self.reanalyze}
    if tags:
        params['tags'] = ','.join(tags)
    if not os.path.isfile(filepath):
        raise SampleFileNotFoundError('Given sample file was not found.')
    res = self.session.post(url=self.url + apiurl,
                            files=[('sample_file', open(filepath, mode='rb'))],
                            params=params)
    if res.status_code == 200:
        return json.loads(res.text)
    raise BadResponseError('Response from VMRay was not HTTP 200.'
                           ' Responsecode: {}; Text: {}'.format(res.status_code, res.text))
Uploads a new sample to VMRay api . Filename gets sent base64 encoded .
236
19
224,832
def build_results(self, results):
    """Format Manalyze plugin results into result subsections.

    One subsection per known plugin (level/summary/plugin_output), plus a
    final subsection with the raw JSON. Replaces five copy-pasted blocks
    with a data-driven loop; output order and content are unchanged.
    """
    plugin_sections = [
        ('Exploit mitigation techniques', 'mitigation'),
        ('Suspicious strings', 'strings'),
        ('Suspicious imports', 'imports'),
        ('Packer', 'packer'),
        ('Clamav', 'clamav'),
    ]
    plugins = results.get('Plugins', {})
    for title, key in plugin_sections:
        plugin = plugins.get(key, {})
        self.add_result_subsection(title, {
            'level': plugin.get('level', None),
            'summary': plugin.get('summary', None),
            'content': plugin.get('plugin_output', None),
        })
    self.add_result_subsection('Manalyze raw output', json.dumps(results, indent=4))
Properly format the results
690
6
224,833
def v4_int_to_packed(address):
    """Return the packed 4-byte big-endian representation of *address*.

    Raises:
        ValueError: if *address* does not fit in 32 bits.
    """
    if address > _BaseV4._ALL_ONES:
        raise ValueError('Address too large for IPv4')
    return Bytes(struct.pack('!I', address))
The binary representation of this address .
55
7
224,834
def _get_prefix_length(number1, number2, bits):
    """Return how many leading bits *number1* and *number2* have in common."""
    for shift in range(bits):
        # Once the differing low bits are shifted away, the rest matches.
        if number1 >> shift == number2 >> shift:
            return bits - shift
    return 0
Get the number of leading bits that are the same for two numbers .
42
13
224,835
def _prefix_from_ip_int(self, ip_int):
    """Return the prefix length encoded by a bitwise netmask.

    Raises:
        NetmaskValueError: if *ip_int* is not of the form ``1*0*``.
    """
    prefixlen = self._max_prefixlen
    # Strip trailing zero bits; what remains must be all ones.
    while prefixlen:
        if ip_int & 1:
            break
        ip_int >>= 1
        prefixlen -= 1
    if ip_int != (1 << prefixlen) - 1:
        raise NetmaskValueError('Bit pattern does not match /1*0*/')
    return prefixlen
Return prefix length from a bitwise netmask .
87
10
224,836
def _prefix_from_prefix_string(self, prefixlen_str):
    """Turn a prefix-length string into a validated integer.

    Raises:
        NetmaskValueError: on non-decimal or out-of-range input.
    """
    try:
        # Reject anything that is not purely decimal digits (e.g. '+1', ' 1').
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            raise ValueError
        prefixlen = int(prefixlen_str)
        if not 0 <= prefixlen <= self._max_prefixlen:
            raise ValueError
    except ValueError:
        raise NetmaskValueError('%s is not a valid prefix length' % prefixlen_str)
    return prefixlen
Turn a prefix length string into an integer .
109
9
224,837
def masked(self):
    """Return the network object with the host bits masked out."""
    return IPNetwork('%s/%d' % (self.network, self._prefixlen),
                     version=self._version)
Return the network object with the host bits masked out .
37
11
224,838
def _string_from_ip_int(self, ip_int=None):
    """Turn a 128-bit integer into compressed hexadecimal IPv6 notation.

    Raises:
        ValueError: if *ip_int* exceeds 128 bits.
    """
    if not ip_int and ip_int != 0:
        ip_int = int(self._ip)
    if ip_int > self._ALL_ONES:
        raise ValueError('IPv6 address is too large')
    hex_str = '%032x' % ip_int
    # Split into eight 16-bit hextets, dropping leading zeros in each.
    hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
    return ':'.join(self._compress_hextets(hextets))
Turns a 128 - bit integer into hexadecimal notation .
154
14
224,839
def scan_file(self, this_file, this_filename):
    """Submit a file to be scanned by Malwares."""
    params = {'api_key': self.api_key, 'filename': this_filename}
    try:
        # NOTE(review): the handle opened here is never explicitly closed;
        # it is released after the request when garbage-collected.
        files = {'file': (this_file.name, open(this_file.name, 'rb'),
                          'application/octet-stream')}
    except TypeError as e:
        return dict(error=e.message)  # e.message: Python 2 only
    try:
        response = requests.post(self.base + 'file/upload', files=files, data=params)
    except requests.RequestException as e:
        return dict(error=e.message)
    return _return_response_and_status_code(response)
Submit a file to be scanned by Malwares
150
10
224,840
def __prepare_body(self, search_value, search_type='url'):
    """Prepare the HTTP body for a Safe Browsing API query.

    Raises:
        SearchTypeNotSupportedError: for search types other than url/ip.
    """
    body = {'client': {'clientId': self.client_id,
                       'clientVersion': self.client_version}}
    if search_type == 'url':
        data = {'threatTypes': ['MALWARE', 'SOCIAL_ENGINEERING', 'UNWANTED_SOFTWARE',
                                'POTENTIALLY_HARMFUL_APPLICATION'],
                'platformTypes': ['ANY_PLATFORM', 'ALL_PLATFORMS', 'WINDOWS', 'LINUX',
                                  'OSX', 'ANDROID', 'IOS'],
                'threatEntryTypes': ['URL']}
    elif search_type == 'ip':
        data = {'threatTypes': ['MALWARE'],
                'platformTypes': ['WINDOWS', 'LINUX', 'OSX'],
                'threatEntryTypes': ['IP_RANGE']}
    else:
        raise SearchTypeNotSupportedError('Currently supported search types are \'url\' and \'ip\'.')
    # TODO: Only found threatEntry 'url' in the docs. What to use for ip_range?
    data['threatEntries'] = [{'url': search_value}]
    body['threatInfo'] = data
    return body
Prepares the http body for querying safebrowsing api . Maybe the list need to get adjusted .
314
22
224,841
def query_rpdns(self):
    """Query the Robtex reverse-pDNS API for the current observable (an IP)."""
    raw = requests.get('https://freeapi.robtex.com/pdns/reverse/{}'.format(
        self.get_data())).text
    # The API streams one JSON document per CRLF-separated line; skip blanks.
    return [json.loads(line) for line in raw.split('\r\n') if len(line) > 0]
Queries robtex reverse pdns - api using an ip as parameter
99
15
224,842
def module_summary(self):
    """Count malicious/suspicious RTF sections and flag CVE descriptions."""
    suspicious = 0
    malicious = 0
    count = 0
    cve = False
    taxonomies = []
    for section in self.results:
        content = section['submodule_section_content']
        if content['class'] == 'malicious':
            malicious += 1
        elif content['class'] == 'suspicious':
            suspicious += 1
        if 'CVE' in content['clsid_description']:
            cve = True
        count += 1
    if malicious > 0:
        taxonomies.append(self.build_taxonomy('malicious', 'FileInfo',
                                              'MaliciousRTFObjects', malicious))
    if suspicious > 0:
        taxonomies.append(self.build_taxonomy('suspicious', 'FileInfo',
                                              'SuspiciousRTFObjects', suspicious))
    if cve:
        taxonomies.append(self.build_taxonomy('malicious', 'FileInfo',
                                              'PossibleCVEExploit', 'True'))
    taxonomies.append(self.build_taxonomy('info', 'FileInfo', 'RTFObjects', count))
    self.summary['taxonomies'] = taxonomies
    return self.summary
Count the malicious and suspicious sections check for CVE description
290
10
224,843
def search_tor_node(self, ip):
    """Check *ip* against the Tor exit-node list; return its entry or {}.

    Only exit addresses whose last status is newer than ``self.delta``
    (when a delta is configured) are retained.
    """
    nodes = {}
    current = {}
    present = datetime.utcnow().replace(tzinfo=pytz.utc)
    for line in self._get_raw_data().splitlines():
        params = line.split(' ')
        if params[0] == 'ExitNode':
            current['node'] = params[1]
        elif params[0] == 'ExitAddress':
            current['last_status'] = params[2] + 'T' + params[3] + '+0000'
            last_status = parse(current['last_status'])
            if self.delta is None or (present - last_status) < self.delta:
                nodes[params[1]] = current
                # Reset accumulator only after a recorded address (as before).
                current = {}
    return nodes.get(ip, {})
Lookup an IP address to check if it is a known tor exit node .
194
16
224,844
def search_hosts(self, ip):
    """Look up a host by its IPv4 address via the Censys API."""
    client = CensysIPv4(api_id=self.__uid, api_secret=self.__api_key)
    return client.view(ip)
Searches for a host using its ipv4 address
47
12
224,845
def search_certificate(self, hash):
    """Look up a specific certificate by hash via the Censys API.

    Note: parameter name ``hash`` shadows the builtin; kept for interface
    compatibility.
    """
    client = CensysCertificates(api_id=self.__uid, api_secret=self.__api_key)
    return client.view(hash)
Searches for a specific certificate using its hash
47
10
224,846
def get_data(self, datatype, data):
    """Look up an IP or email address in the StopForumSpam database.

    Returns a converted result dict, or {} when the API reports failure.
    """
    params = StopforumspamClient._set_payload(datatype, data)
    response = self.client.get('https://api.stopforumspam.org/api',
                               params=params, proxies=self.proxies)
    response.raise_for_status()
    report = response.json()
    if not report['success']:
        return {}
    payload = report[StopforumspamClient._type_conversion[datatype]]
    return self._data_conversion(payload)
Look for an IP address or an email address in the spammer database .
133
15
224,847
def construct(cls, faker, path_to_factories=None):
    """Create a new factory container, loading factory modules from disk.

    Args:
        faker: faker instance; a fresh instance of its class backs the container.
        path_to_factories: optional directory whose files are resolved as factories.

    Returns:
        The new factory (faker) instance.
    """
    factory = faker.__class__()
    if path_to_factories is not None and os.path.isdir(path_to_factories):
        for filename in os.listdir(path_to_factories):
            # Join with the directory: os.listdir() yields bare names, so the
            # original isfile(filename) check only worked when run from that
            # directory and silently skipped every factory file otherwise.
            if os.path.isfile(os.path.join(path_to_factories, filename)):
                cls._resolve(path_to_factories, filename)
    return factory
Create a new factory container .
100
6
224,848
def define(self, klass, name="default"):
    """Register a factory definition for *klass* under *name* (decorator)."""
    def decorate(func):
        # Register the undecorated function; the wrapper only forwards calls.
        self.register(klass, func, name=name)

        @wraps(func)
        def wrapped(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapped
    return decorate
Define a class with a given set of attributes .
70
11
224,849
def create_as(self, klass, name, **attributes):
    """Create an instance of the given model and type and persist it."""
    builder = self.of(klass, name)
    return builder.create(**attributes)
Create an instance of the given model and type and persist it to the database .
34
16
224,850
def make_as(self, klass, name, **attributes):
    """Create an instance of the given model and type (not persisted)."""
    builder = self.of(klass, name)
    return builder.make(**attributes)
Create an instance of the given model and type .
34
10
224,851
def of(self, klass, name="default"):
    """Create a FactoryBuilder for the given model and definition name."""
    return FactoryBuilder(klass, name, self._definitions,
                          self._faker, self._resolver)
Create a builder for the given model .
42
8
224,852
def build(self, klass, name="default", amount=None):
    """Make a factory builder with a specified amount.

    Supports ``build(klass, 3)`` as shorthand: an integer in the *name*
    position is treated as the amount for the default definition.
    """
    if amount is None:
        if isinstance(name, int):
            amount, name = name, "default"
        else:
            amount = 1
    return self.of(klass, name).times(amount)
Makes a factory builder with a specified amount .
61
10
224,853
def _get_renamed_diff(self, blueprint, command, column, schema):
    """Build a table diff carrying the new column name for a rename."""
    table_diff = self._get_table_diff(blueprint, schema)
    return self._set_renamed_columns(table_diff, command, column)
Get a new column instance with the new column name .
57
11
224,854
def _set_renamed_columns(self, table_diff, command, column):
    """Attach the renamed column (old name -> new Column) to the diff."""
    renamed = Column(command.to, column.get_type(), column.to_dict())
    table_diff.renamed_columns = {command.from_: renamed}
    return table_diff
Set the renamed columns on the table diff .
70
9
224,855
def _get_command_by_name(self, blueprint, name):
    """Return the first command matching *name*, or None if absent."""
    commands = self._get_commands_by_name(blueprint, name)
    if commands:
        return commands[0]
Get the primary key command if it exists .
46
9
224,856
def _get_commands_by_name(self, blueprint, name):
    """Return all blueprint commands whose name matches *name*."""
    return [command for command in blueprint.get_commands()
            if command.name == name]
Get all of the commands with a given name .
42
10
224,857
def prefix_list(self, prefix, values):
    """Prepend *prefix* (plus a space) to every value in *values*."""
    return [prefix + " " + value for value in values]
Add a prefix to a list of values .
30
9
224,858
def _get_default_value(self, value):
    """Format *value* for use in a DEFAULT clause.

    Query expressions pass through untouched; booleans become '0'/'1';
    everything else is single-quoted.
    """
    if isinstance(value, QueryExpression):
        return value
    if isinstance(value, bool):
        return "'%s'" % int(value)
    return "'%s'" % value
Format a value so that it can be used in default clauses .
52
13
224,859
def _get_changed_diff(self, blueprint, schema):
    """Compute the table difference for the blueprint's column changes."""
    original = schema.list_table_details(self.get_table_prefix() + blueprint.get_table())
    changed = self._get_table_with_column_changes(blueprint, original)
    return Comparator().diff_table(original, changed)
Get the table difference for the given changes .
73
10
224,860
def _get_table_with_column_changes ( self , blueprint , table ) : table = table . clone ( ) for fluent in blueprint . get_changed_columns ( ) : column = self . _get_column_for_change ( table , fluent ) for key , value in fluent . get_attributes ( ) . items ( ) : option = self . _map_fluent_option ( key ) if option is not None : method = "set_%s" % option if hasattr ( column , method ) : getattr ( column , method ) ( self . _map_fluent_value ( option , value ) ) return table
Get a copy of the given table after making the column changes .
139
13
224,861
def _get_column_for_change ( self , table , fluent ) : return table . change_column ( fluent . name , self . _get_column_change_options ( fluent ) ) . get_column ( fluent . name )
Get the column instance for a column change .
51
9
224,862
def _get_delete_query ( self ) : foreign = self . get_attribute ( self . __foreign_key ) query = self . new_query ( ) . where ( self . __foreign_key , foreign ) return query . where ( self . __other_key , self . get_attribute ( self . __other_key ) )
Get the query builder for a delete operation on the pivot .
73
12
224,863
def set_pivot_keys ( self , foreign_key , other_key ) : self . __foreign_key = foreign_key self . __other_key = other_key return self
Set the key names for the pivot model instance
41
9
224,864
def create ( self , * * attributes ) : results = self . make ( * * attributes ) if self . _amount == 1 : if self . _resolver : results . set_connection_resolver ( self . _resolver ) results . save ( ) else : if self . _resolver : results . each ( lambda r : r . set_connection_resolver ( self . _resolver ) ) for result in results : result . save ( ) return results
Create a collection of models and persist them to the database .
99
12
224,865
def make ( self , * * attributes ) : if self . _amount == 1 : return self . _make_instance ( * * attributes ) else : results = [ ] for _ in range ( self . _amount ) : results . append ( self . _make_instance ( * * attributes ) ) return Collection ( results )
Create a collection of models .
68
6
224,866
def _make_instance ( self , * * attributes ) : definition = self . _definitions [ self . _klass ] [ self . _name ] ( self . _faker ) definition . update ( attributes ) instance = self . _klass ( ) instance . force_fill ( * * definition ) return instance
Make an instance of the model with the given attributes .
67
11
224,867
def run ( wrapped ) : @ wraps ( wrapped ) def _run ( self , query , bindings = None , * args , * * kwargs ) : self . _reconnect_if_missing_connection ( ) start = time . time ( ) try : result = wrapped ( self , query , bindings , * args , * * kwargs ) except Exception as e : result = self . _try_again_if_caused_by_lost_connection ( e , query , bindings , wrapped ) t = self . _get_elapsed_time ( start ) self . log_query ( query , bindings , t ) return result return _run
Special decorator encapsulating query method .
138
8
224,868
def where_pivot ( self , column , operator = None , value = None , boolean = "and" ) : self . _pivot_wheres . append ( [ column , operator , value , boolean ] ) return self . _query . where ( "%s.%s" % ( self . _table , column ) , operator , value , boolean )
Set a where clause for a pivot table column .
76
10
224,869
def or_where_pivot ( self , column , operator = None , value = None ) : return self . where_pivot ( column , operator , value , "or" )
Set an or where clause for a pivot table column .
39
11
224,870
def first ( self , columns = None ) : self . _query . take ( 1 ) results = self . get ( columns ) if len ( results ) > 0 : return results . first ( ) return
Execute the query and get the first result .
42
10
224,871
def first_or_fail ( self , columns = None ) : model = self . first ( columns ) if model is not None : return model raise ModelNotFound ( self . _parent . __class__ )
Execute the query and get the first result or raise an exception .
44
14
224,872
def _hydrate_pivot_relation ( self , models ) : for model in models : pivot = self . new_existing_pivot ( self . _clean_pivot_attributes ( model ) ) model . set_relation ( "pivot" , pivot )
Hydrate the pivot table relationship on the models .
58
10
224,873
def touch ( self ) : key = self . get_related ( ) . get_key_name ( ) columns = self . get_related_fresh_update ( ) ids = self . get_related_ids ( ) if len ( ids ) > 0 : self . get_related ( ) . new_query ( ) . where_in ( key , ids ) . update ( columns )
Touch all of the related models of the relationship .
85
10
224,874
def get_related_ids ( self ) : related = self . get_related ( ) full_key = related . get_qualified_key_name ( ) return self . get_query ( ) . select ( full_key ) . lists ( related . get_key_name ( ) )
Get all of the IDs for the related models .
62
10
224,875
def save_many ( self , models , joinings = None ) : if joinings is None : joinings = { } for key , model in enumerate ( models ) : self . save ( model , joinings . get ( key ) , False ) self . touch_if_touching ( ) return models
Save a list of new models and attach them to the parent model
65
13
224,876
def first_or_create ( self , _attributes = None , _joining = None , _touch = True , * * attributes ) : if _attributes is not None : attributes . update ( _attributes ) instance = self . _query . where ( attributes ) . first ( ) if instance is None : instance = self . create ( attributes , _joining or { } , _touch ) return instance
Get the first related model record matching the attributes or create it .
85
13
224,877
def sync ( self , ids , detaching = True ) : changes = { "attached" : [ ] , "detached" : [ ] , "updated" : [ ] } if isinstance ( ids , Collection ) : ids = ids . model_keys ( ) current = self . _new_pivot_query ( ) . lists ( self . _other_key ) . all ( ) records = self . _format_sync_list ( ids ) detach = [ x for x in current if x not in records . keys ( ) ] if detaching and len ( detach ) > 0 : self . detach ( detach ) changes [ "detached" ] = detach changes . update ( self . _attach_new ( records , current , False ) ) if len ( changes [ "attached" ] ) or len ( changes [ "updated" ] ) : self . touch_if_touching ( ) return changes
Sync the intermediate tables with a list of IDs or collection of models
198
13
224,878
def _format_sync_list ( self , records ) : results = { } for attributes in records : if not isinstance ( attributes , dict ) : id , attributes = attributes , { } else : id = list ( attributes . keys ( ) ) [ 0 ] attributes = attributes [ id ] results [ id ] = attributes return results
Format the sync list so that it is keyed by ID .
69
13
224,879
def attach ( self , id , attributes = None , touch = True ) : if isinstance ( id , orator . orm . Model ) : id = id . get_key ( ) query = self . new_pivot_statement ( ) if not isinstance ( id , list ) : id = [ id ] query . insert ( self . _create_attach_records ( id , attributes ) ) if touch : self . touch_if_touching ( )
Attach a model to the parent .
99
7
224,880
def _create_attach_records ( self , ids , attributes ) : records = [ ] timed = self . _has_pivot_column ( self . created_at ( ) ) or self . _has_pivot_column ( self . updated_at ( ) ) for key , value in enumerate ( ids ) : records . append ( self . _attacher ( key , value , attributes , timed ) ) return records
Create a list of records to insert into the pivot table .
93
12
224,881
def _attacher ( self , key , value , attributes , timed ) : id , extra = self . _get_attach_id ( key , value , attributes ) record = self . _create_attach_record ( id , timed ) if extra : record . update ( extra ) return record
Create a full attachment record payload .
61
7
224,882
def _get_attach_id ( self , key , value , attributes ) : if isinstance ( value , dict ) : key = list ( value . keys ( ) ) [ 0 ] attributes . update ( value [ key ] ) return [ key , attributes ] return value , attributes
Get the attach record ID and extra attributes .
58
9
224,883
def _set_timestamps_on_attach ( self , record , exists = False ) : fresh = self . _parent . fresh_timestamp ( ) if not exists and self . _has_pivot_column ( self . created_at ( ) ) : record [ self . created_at ( ) ] = fresh if self . _has_pivot_column ( self . updated_at ( ) ) : record [ self . updated_at ( ) ] = fresh return record
Set the creation and update timestamps on an attach record .
103
13
224,884
def detach ( self , ids = None , touch = True ) : if isinstance ( ids , orator . orm . model . Model ) : ids = ids . get_key ( ) if ids is None : ids = [ ] query = self . _new_pivot_query ( ) if not isinstance ( ids , list ) : ids = [ ids ] if len ( ids ) > 0 : query . where_in ( self . _other_key , ids ) if touch : self . touch_if_touching ( ) results = query . delete ( ) return results
Detach models from the relationship .
133
7
224,885
def touch_if_touching ( self ) : if self . _touching_parent ( ) : self . get_parent ( ) . touch ( ) if self . get_parent ( ) . touches ( self . _relation_name ) : self . touch ( )
Touch if the parent model is being touched .
57
9
224,886
def with_pivot ( self , * columns ) : columns = list ( columns ) self . _pivot_columns += columns return self
Set the columns on the pivot table to retrieve .
30
10
224,887
def with_timestamps ( self , created_at = None , updated_at = None ) : if not created_at : created_at = self . created_at ( ) if not updated_at : updated_at = self . updated_at ( ) return self . with_pivot ( created_at , updated_at )
Specify that the pivot table has creation and update columns .
72
12
224,888
def _get_eager_model_keys ( self , models ) : keys = [ ] for model in models : value = getattr ( model , self . _foreign_key ) if value is not None and value not in keys : keys . append ( value ) if not len ( keys ) : return [ 0 ] return keys
Gather the keys from a list of related models .
69
11
224,889
def update ( self , _attributes = None , * * attributes ) : if _attributes is not None : attributes . update ( _attributes ) instance = self . get_results ( ) return instance . fill ( attributes ) . save ( )
Update the parent model on the relationship .
52
8
224,890
def _build_dictionary ( self , models ) : for model in models : key = getattr ( model , self . _morph_type , None ) if key : foreign = getattr ( model , self . _foreign_key ) if key not in self . _dictionary : self . _dictionary [ key ] = { } if foreign not in self . _dictionary [ key ] : self . _dictionary [ key ] [ foreign ] = [ ] self . _dictionary [ key ] [ foreign ] . append ( model )
Build a dictionary with the models .
114
7
224,891
def get_eager ( self ) : for type in self . _dictionary . keys ( ) : self . _match_to_morph_parents ( type , self . _get_results_by_type ( type ) ) return self . _models
Get the relationship for eager loading .
54
7
224,892
def _match_to_morph_parents ( self , type , results ) : for result in results : if result . get_key ( ) in self . _dictionary . get ( type , [ ] ) : for model in self . _dictionary [ type ] [ result . get_key ( ) ] : model . set_relation ( self . _relation , Result ( result , self , model , related = result ) )
Match the results for a given type to their parent .
90
11
224,893
def _get_results_by_type ( self , type ) : instance = self . _create_model_by_type ( type ) key = instance . get_key_name ( ) query = instance . new_query ( ) query = self . _use_with_trashed ( query ) return query . where_in ( key , self . _gather_keys_by_type ( type ) . all ( ) ) . get ( )
Get all the relation results for a type .
96
9
224,894
def _gather_keys_by_type ( self , type ) : foreign = self . _foreign_key keys = ( BaseCollection . make ( list ( self . _dictionary [ type ] . values ( ) ) ) . map ( lambda models : getattr ( models [ 0 ] , foreign ) ) . unique ( ) ) return keys
Gather all of the foreign keys for a given type .
72
12
224,895
def set_primary_key ( self , columns , index_name = False ) : self . _add_index ( self . _create_index ( columns , index_name or "primary" , True , True ) ) for column_name in columns : column = self . get_column ( column_name ) column . set_notnull ( True ) return self
Set the primary key .
77
5
224,896
def drop_index ( self , name ) : name = self . _normalize_identifier ( name ) if not self . has_index ( name ) : raise IndexDoesNotExist ( name , self . _name ) del self . _indexes [ name ]
Drops an index from this table .
57
8
224,897
def rename_index ( self , old_name , new_name = None ) : old_name = self . _normalize_identifier ( old_name ) normalized_new_name = self . _normalize_identifier ( new_name ) if old_name == normalized_new_name : return self if not self . has_index ( old_name ) : raise IndexDoesNotExist ( old_name , self . _name ) if self . has_index ( normalized_new_name ) : raise IndexAlreadyExists ( normalized_new_name , self . _name ) old_index = self . _indexes [ old_name ] if old_index . is_primary ( ) : self . drop_primary_key ( ) return self . set_primary_key ( old_index . get_columns ( ) , new_name ) del self . _indexes [ old_name ] if old_index . is_unique ( ) : return self . add_unique_index ( old_index . get_columns ( ) , new_name ) return self . add_index ( old_index . get_columns ( ) , new_name , old_index . get_flags ( ) )
Renames an index .
263
5
224,898
def columns_are_indexed ( self , columns ) : for index in self . _indexes . values ( ) : if index . spans_columns ( columns ) : return True return False
Checks if an index begins in the order of the given columns .
41
14
224,899
def _create_index ( self , columns , name , is_unique , is_primary , flags = None , options = None ) : if re . match ( "[^a-zA-Z0-9_]+" , self . _normalize_identifier ( name ) ) : raise IndexNameInvalid ( name ) for column in columns : if isinstance ( column , dict ) : column = list ( column . keys ( ) ) [ 0 ] if not self . has_column ( column ) : raise ColumnDoesNotExist ( column , self . _name ) return Index ( name , columns , is_unique , is_primary , flags , options )
Creates an Index instance .
140
6