idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
36,800 | def _summary ( self , contract ) : ret = '' if len ( contract . immediate_inheritance ) == 1 : ret += '%s -> %s;\n' % ( contract . name , contract . immediate_inheritance [ 0 ] ) else : for i in range ( 0 , len ( contract . immediate_inheritance ) ) : ret += '%s -> %s [ label="%s" ];\n' % ( contract . name , contract . immediate_inheritance [ i ] , i + 1 ) visibilities = [ 'public' , 'external' ] public_functions = [ self . _get_pattern_func ( f , contract ) for f in contract . functions if not f . is_constructor and f . contract == contract and f . visibility in visibilities ] public_functions = '' . join ( public_functions ) private_functions = [ self . _get_pattern_func ( f , contract ) for f in contract . functions if not f . is_constructor and f . contract == contract and f . visibility not in visibilities ] private_functions = '' . join ( private_functions ) modifiers = [ self . _get_pattern_func ( m , contract ) for m in contract . modifiers if m . contract == contract ] modifiers = '' . join ( modifiers ) public_variables = [ self . _get_pattern_var ( v , contract ) for v in contract . variables if v . contract == contract and v . visibility in visibilities ] public_variables = '' . join ( public_variables ) private_variables = [ self . _get_pattern_var ( v , contract ) for v in contract . variables if v . contract == contract and v . visibility not in visibilities ] private_variables = '' . join ( private_variables ) indirect_shadowing_information = self . _get_indirect_shadowing_information ( contract ) ret += '%s[shape="box"' % contract . name ret += 'label=< <TABLE border="0">' ret += '<TR><TD align="center"><B>%s</B></TD></TR>' % contract . 
name if public_functions : ret += '<TR><TD align="left"><I>Public Functions:</I></TD></TR>' ret += '%s' % public_functions if private_functions : ret += '<TR><TD align="left"><I>Private Functions:</I></TD></TR>' ret += '%s' % private_functions if modifiers : ret += '<TR><TD align="left"><I>Modifiers:</I></TD></TR>' ret += '%s' % modifiers if public_variables : ret += '<TR><TD align="left"><I>Public Variables:</I></TD></TR>' ret += '%s' % public_variables if private_variables : ret += '<TR><TD align="left"><I>Private Variables:</I></TD></TR>' ret += '%s' % private_variables if indirect_shadowing_information : ret += '<TR><TD><BR/></TD></TR><TR><TD align="left" border="1"><font color="#777777" point-size="10">%s</font></TD></TR>' % indirect_shadowing_information . replace ( '\n' , '<BR/>' ) ret += '</TABLE> >];\n' return ret | Build summary using HTML |
36,801 | def detect_builtin_shadowing_definitions ( self , contract ) : result = [ ] for function in contract . functions : if function . contract == contract : if self . is_builtin_symbol ( function . name ) : result . append ( ( self . SHADOWING_FUNCTION , function , None ) ) result += self . detect_builtin_shadowing_locals ( function ) for modifier in contract . modifiers : if modifier . contract == contract : if self . is_builtin_symbol ( modifier . name ) : result . append ( ( self . SHADOWING_MODIFIER , modifier , None ) ) result += self . detect_builtin_shadowing_locals ( modifier ) for variable in contract . variables : if variable . contract == contract : if self . is_builtin_symbol ( variable . name ) : result . append ( ( self . SHADOWING_STATE_VARIABLE , variable , None ) ) for event in contract . events : if event . contract == contract : if self . is_builtin_symbol ( event . name ) : result . append ( ( self . SHADOWING_EVENT , event , None ) ) return result | Detects if functions access modifiers events state variables or local variables are named after built - in symbols . Any such definitions are returned in a list . |
36,802 | def _detect ( self ) : results = [ ] for contract in self . contracts : shadows = self . detect_builtin_shadowing_definitions ( contract ) if shadows : for shadow in shadows : shadow_type = shadow [ 0 ] shadow_object = shadow [ 1 ] local_variable_parent = shadow [ 2 ] local_variable_path = contract . name + "." if local_variable_parent is not None : local_variable_path += local_variable_parent . name + "." local_variable_path += shadow_object . name info = '{} ({} @ {}) shadows built-in symbol \"{}"\n' . format ( local_variable_path , shadow_type , shadow_object . source_mapping_str , shadow_object . name ) json = self . generate_json_result ( info ) if shadow_type in [ self . SHADOWING_FUNCTION , self . SHADOWING_MODIFIER , self . SHADOWING_EVENT ] : self . add_function_to_json ( shadow_object , json ) elif shadow_type in [ self . SHADOWING_STATE_VARIABLE , self . SHADOWING_LOCAL_VARIABLE ] : self . add_variable_to_json ( shadow_object , json ) results . append ( json ) return results | Detect shadowing of built - in symbols |
36,803 | def detect_c3_function_shadowing ( contract ) : results = { } for i in range ( 0 , len ( contract . immediate_inheritance ) - 1 ) : inherited_contract1 = contract . immediate_inheritance [ i ] for function1 in inherited_contract1 . functions_and_modifiers : if function1 . full_name in results or function1 . is_constructor or not function1 . is_implemented : continue functions_matching = [ ( inherited_contract1 , function1 ) ] already_processed = set ( [ function1 ] ) for x in range ( i + 1 , len ( contract . immediate_inheritance ) ) : inherited_contract2 = contract . immediate_inheritance [ x ] for function2 in inherited_contract2 . functions_and_modifiers : if function2 in already_processed or function2 . is_constructor or not function2 . is_implemented : continue if function1 . full_name == function2 . full_name : functions_matching . append ( ( inherited_contract2 , function2 ) ) already_processed . add ( function2 ) if len ( functions_matching ) > 1 : results [ function1 . full_name ] = functions_matching return list ( results . values ( ) ) | Detects and obtains functions which are indirectly shadowed via multiple inheritance by C3 linearization properties despite not directly inheriting from each other . |
36,804 | def _detect ( self ) : results = [ ] for c in self . slither . contracts_derived : ret = self . detect_uninitialized ( c ) for variable , functions in ret : info = "{}.{} ({}) is never initialized. It is used in:\n" info = info . format ( variable . contract . name , variable . name , variable . source_mapping_str ) for f in functions : info += "\t- {} ({})\n" . format ( f . name , f . source_mapping_str ) source = [ variable . source_mapping ] source += [ f . source_mapping for f in functions ] json = self . generate_json_result ( info ) self . add_variable_to_json ( variable , json ) self . add_functions_to_json ( functions , json ) results . append ( json ) return results | Detect uninitialized state variables |
36,805 | def detect_functions_called ( contract ) : result = [ ] for func in contract . all_functions_called : for node in func . nodes : for ir in node . irs : if isinstance ( ir , ( InternalCall , SolidityCall ) ) : result . append ( ir . function ) return result | Returns a list of InternalCall SolidityCall calls made in a function |
36,806 | def _contains_internal_dynamic_call ( contract ) : for func in contract . all_functions_called : for node in func . nodes : for ir in node . irs : if isinstance ( ir , ( InternalDynamicCall ) ) : return True return False | Checks if a contract contains a dynamic call either in a direct definition or through inheritance . |
36,807 | def get_base_most_function ( function ) : for contract in function . contract . inheritance + [ function . contract ] : for f in contract . functions_not_inherited : if f . full_name == function . full_name : return f raise Exception ( "Could not resolve the base-most function for the provided function." ) | Obtains the base function definition for the provided function . This could be used to obtain the original definition of a function if the provided function is an override . |
36,808 | def get_all_function_definitions ( base_most_function ) : return [ base_most_function ] + [ function for derived_contract in base_most_function . contract . derived_contracts for function in derived_contract . functions if function . full_name == base_most_function . full_name ] | Obtains all function definitions given a base - most function . This includes the provided function plus any overrides of that function . |
36,809 | def detect_complex_func ( func ) : result = [ ] code_complexity = compute_cyclomatic_complexity ( func ) if code_complexity > ComplexFunction . MAX_CYCLOMATIC_COMPLEXITY : result . append ( { "func" : func , "cause" : ComplexFunction . CAUSE_CYCLOMATIC } ) count = 0 for node in func . nodes : for ir in node . irs : if isinstance ( ir , ( HighLevelCall , LowLevelCall , LibraryCall ) ) : count += 1 if count > ComplexFunction . MAX_EXTERNAL_CALLS : result . append ( { "func" : func , "cause" : ComplexFunction . CAUSE_EXTERNAL_CALL } ) if len ( func . state_variables_written ) > ComplexFunction . MAX_STATE_VARIABLES : result . append ( { "func" : func , "cause" : ComplexFunction . CAUSE_STATE_VARS } ) return result | Detect the cyclomatic complexity of the contract functions shouldn t be greater than 7 |
36,810 | def _detect ( self ) : results = [ ] for c in self . slither . contracts_derived : unusedVars = self . detect_unused ( c ) if unusedVars : info = '' for var in unusedVars : info += "{}.{} ({}) is never used in {}\n" . format ( var . contract . name , var . name , var . source_mapping_str , c . name ) json = self . generate_json_result ( info ) self . add_variables_to_json ( unusedVars , json ) results . append ( json ) return results | Detect unused state variables |
36,811 | def _detect ( self ) : results = [ ] self . results = [ ] self . visited_all_paths = { } for contract in self . slither . contracts : for function in contract . functions : if function . is_implemented and function . contract == contract : if function . contains_assembly : continue uninitialized_local_variables = [ v for v in function . local_variables if not v . is_storage and v . uninitialized ] function . entry_point . context [ self . key ] = uninitialized_local_variables self . _detect_uninitialized ( function , function . entry_point , [ ] ) all_results = list ( set ( self . results ) ) for ( function , uninitialized_local_variable ) in all_results : var_name = uninitialized_local_variable . name info = "{} in {}.{} ({}) is a local variable never initialiazed\n" info = info . format ( var_name , function . contract . name , function . name , uninitialized_local_variable . source_mapping_str ) json = self . generate_json_result ( info ) self . add_variable_to_json ( uninitialized_local_variable , json ) self . add_function_to_json ( function , json ) results . append ( json ) return results | Detect uninitialized local variables |
36,812 | def _detect ( self ) : results = [ ] for c in self . contracts : unindexed_params = self . detect_erc20_unindexed_event_params ( c ) if unindexed_params : info = "{} ({}) does not mark important ERC20 parameters as 'indexed':\n" info = info . format ( c . name , c . source_mapping_str ) for ( event , parameter ) in unindexed_params : info += "\t-{} ({}) does not index parameter '{}'\n" . format ( event . name , event . source_mapping_str , parameter . name ) json = self . generate_json_result ( info ) self . add_functions_to_json ( [ event for event , _ in unindexed_params ] , json ) results . append ( json ) return results | Detect un - indexed ERC20 event parameters in all contracts . |
36,813 | def print_functions ( self , d ) : for c in self . contracts : for f in c . functions : f . cfg_to_dot ( os . path . join ( d , '{}.{}.dot' . format ( c . name , f . name ) ) ) | Export all the functions to dot files |
36,814 | def output ( self , filename ) : info = 'Inheritance\n' if not self . contracts : return info += blue ( 'Child_Contract -> ' ) + green ( 'Immediate_Base_Contracts' ) info += green ( ' [Not_Immediate_Base_Contracts]' ) for child in self . contracts : info += blue ( f'\n+ {child.name}' ) if child . inheritance : immediate = child . immediate_inheritance not_immediate = [ i for i in child . inheritance if i not in immediate ] info += ' -> ' + green ( ", " . join ( map ( str , immediate ) ) ) if not_immediate : info += ", [" + green ( ", " . join ( map ( str , not_immediate ) ) ) + "]" info += green ( '\n\nBase_Contract -> ' ) + blue ( 'Immediate_Child_Contracts' ) info += blue ( ' [Not_Immediate_Child_Contracts]' ) for base in self . contracts : info += green ( f'\n+ {base.name}' ) children = list ( self . _get_child_contracts ( base ) ) if children : immediate = [ child for child in children if base in child . immediate_inheritance ] not_immediate = [ child for child in children if not child in immediate ] info += ' -> ' + blue ( ", " . join ( map ( str , immediate ) ) ) if not_immediate : info += ', [' + blue ( ", " . join ( map ( str , not_immediate ) ) ) + ']' self . info ( info ) | Output the inheritance relation |
36,815 | def _detect ( self ) : results = [ ] self . results = [ ] self . visited_all_paths = { } for contract in self . slither . contracts : for function in contract . functions : if function . is_implemented : uninitialized_storage_variables = [ v for v in function . local_variables if v . is_storage and v . uninitialized ] function . entry_point . context [ self . key ] = uninitialized_storage_variables self . _detect_uninitialized ( function , function . entry_point , [ ] ) for ( function , uninitialized_storage_variable ) in self . results : var_name = uninitialized_storage_variable . name info = "{} in {}.{} ({}) is a storage variable never initialiazed\n" info = info . format ( var_name , function . contract . name , function . name , uninitialized_storage_variable . source_mapping_str ) json = self . generate_json_result ( info ) self . add_variable_to_json ( uninitialized_storage_variable , json ) self . add_function_to_json ( function , json ) results . append ( json ) return results | Detect uninitialized storage variables |
36,816 | def _can_callback ( self , irs ) : for ir in irs : if isinstance ( ir , LowLevelCall ) : return True if isinstance ( ir , HighLevelCall ) and not isinstance ( ir , LibraryCall ) : if self . slither . solc_version and self . slither . solc_version . startswith ( '0.5.' ) : if isinstance ( ir . function , Function ) and ( ir . function . view or ir . function . pure ) : continue if isinstance ( ir . function , Variable ) : continue if ir . destination == SolidityVariable ( 'this' ) : if isinstance ( ir . function , Variable ) : continue if not ir . function . all_high_level_calls ( ) : if not ir . function . all_low_level_calls ( ) : continue return True return False | Detect if the node contains a call that can be used to re - entrance |
36,817 | def _can_send_eth ( irs ) : for ir in irs : if isinstance ( ir , ( HighLevelCall , LowLevelCall , Transfer , Send ) ) : if ir . call_value : return True return False | Detect if the node can send eth |
36,818 | def remove_father ( self , father ) : self . _fathers = [ x for x in self . _fathers if x . node_id != father . node_id ] | Remove the father node . Do nothing if the node is not a father |
36,819 | def remove_son ( self , son ) : self . _sons = [ x for x in self . _sons if x . node_id != son . node_id ] | Remove the son node . Do nothing if the node is not a son |
36,820 | def detect_deprecated_references_in_node ( self , node ) : results = [ ] if node . expression : results += self . detect_deprecation_in_expression ( node . expression ) for dep_node in self . DEPRECATED_NODE_TYPES : if node . type == dep_node [ 0 ] : results . append ( dep_node ) return results | Detects if a node makes use of any deprecated standards . |
36,821 | def detect_deprecated_references_in_contract ( self , contract ) : results = [ ] for state_variable in contract . variables : if state_variable . contract != contract : continue if state_variable . expression : deprecated_results = self . detect_deprecation_in_expression ( state_variable . expression ) if deprecated_results : results . append ( ( state_variable , deprecated_results ) ) for function in contract . functions + contract . modifiers : if function . contract != contract : continue for node in function . nodes : deprecated_results = self . detect_deprecated_references_in_node ( node ) for ir in node . irs : if isinstance ( ir , LowLevelCall ) : for dep_llc in self . DEPRECATED_LOW_LEVEL_CALLS : if ir . function_name == dep_llc [ 0 ] : deprecated_results . append ( dep_llc ) if deprecated_results : results . append ( ( node , deprecated_results ) ) return results | Detects the usage of any deprecated built - in symbols . |
36,822 | def process ( filename , args , detector_classes , printer_classes ) : ast = '--ast-compact-json' if args . legacy_ast : ast = '--ast-json' args . filter_paths = parse_filter_paths ( args ) slither = Slither ( filename , ast_format = ast , ** vars ( args ) ) return _process ( slither , detector_classes , printer_classes ) | The core high - level code for running Slither static analysis . |
36,823 | def _detect ( self ) : results = [ ] for c in self . contracts : for f in c . functions : if f . contract != c : continue if f . view or f . pure : if f . contains_assembly : attr = 'view' if f . view else 'pure' info = '{}.{} ({}) is declared {} but contains assembly code\n' info = info . format ( f . contract . name , f . name , f . source_mapping_str , attr ) json = self . generate_json_result ( info ) self . add_function_to_json ( f , json ) json [ 'elements' ] . append ( { 'type' : 'info' , 'contains_assembly' : True } ) results . append ( json ) variables_written = f . all_state_variables_written ( ) if variables_written : attr = 'view' if f . view else 'pure' info = '{}.{} ({}) is declared {} but changes state variables:\n' info = info . format ( f . contract . name , f . name , f . source_mapping_str , attr ) for variable_written in variables_written : info += '\t- {}.{}\n' . format ( variable_written . contract . name , variable_written . name ) json = self . generate_json_result ( info ) self . add_function_to_json ( f , json ) self . add_variables_to_json ( variables_written , json ) json [ 'elements' ] . append ( { 'type' : 'info' , 'contains_assembly' : False } ) results . append ( json ) return results | Detect the constant function changing the state |
36,824 | def constructor ( self ) : cst = self . constructor_not_inherited if cst : return cst for inherited_contract in self . inheritance : cst = inherited_contract . constructor_not_inherited if cst : return cst return None | Return the contract s immediate constructor . If there is no immediate constructor returns the first constructor executed following the c3 linearization Return None if there is no constructor . |
36,825 | def get_functions_reading_from_variable ( self , variable ) : return [ f for f in self . functions if f . is_reading ( variable ) ] | Return the functions reading the variable |
36,826 | def get_functions_writing_to_variable ( self , variable ) : return [ f for f in self . functions if f . is_writing ( variable ) ] | Return the functions writing the variable |
36,827 | def get_source_var_declaration ( self , var ) : return next ( ( x . source_mapping for x in self . variables if x . name == var ) ) | Return the source mapping where the variable is declared |
36,828 | def get_source_event_declaration ( self , event ) : return next ( ( x . source_mapping for x in self . events if x . name == event ) ) | Return the source mapping where the event is declared |
36,829 | def get_summary ( self ) : func_summaries = [ f . get_summary ( ) for f in self . functions ] modif_summaries = [ f . get_summary ( ) for f in self . modifiers ] return ( self . name , [ str ( x ) for x in self . inheritance ] , [ str ( x ) for x in self . variables ] , func_summaries , modif_summaries ) | Return the function summary |
36,830 | def is_erc20 ( self ) : full_names = [ f . full_name for f in self . functions ] return 'transfer(address,uint256)' in full_names and 'transferFrom(address,address,uint256)' in full_names and 'approve(address,uint256)' in full_names | Check if the contract is an erc20 token |
36,831 | def integrate_value_gas ( result ) : was_changed = True calls = [ ] while was_changed : was_changed = False assigments = { } for i in result : if isinstance ( i , OperationWithLValue ) : assigments [ i . lvalue . name ] = i if isinstance ( i , TmpCall ) : if isinstance ( i . called , Variable ) and i . called . name in assigments : ins_ori = assigments [ i . called . name ] i . set_ori ( ins_ori ) to_remove = [ ] variable_to_replace = { } for idx in range ( len ( result ) ) : ins = result [ idx ] if is_value ( ins ) and isinstance ( result [ idx - 1 ] , Argument ) : was_changed = True result [ idx - 1 ] . set_type ( ArgumentType . VALUE ) result [ idx - 1 ] . call_id = ins . ori . variable_left . name calls . append ( ins . ori . variable_left ) to_remove . append ( ins ) variable_to_replace [ ins . lvalue . name ] = ins . ori . variable_left elif is_gas ( ins ) and isinstance ( result [ idx - 1 ] , Argument ) : was_changed = True result [ idx - 1 ] . set_type ( ArgumentType . GAS ) result [ idx - 1 ] . call_id = ins . ori . variable_left . name calls . append ( ins . ori . variable_left ) to_remove . append ( ins ) variable_to_replace [ ins . lvalue . name ] = ins . ori . variable_left result = [ i for i in result if not i in to_remove ] for ins in result : if isinstance ( ins , TmpCall ) : while ins . called . name in variable_to_replace : was_changed = True ins . call_id = variable_to_replace [ ins . called . name ] . name calls . append ( ins . called ) ins . called = variable_to_replace [ ins . called . name ] if isinstance ( ins , Argument ) : while ins . call_id in variable_to_replace : was_changed = True ins . call_id = variable_to_replace [ ins . call_id ] . name calls = list ( set ( [ str ( c ) for c in calls ] ) ) idx = 0 calls_d = { } for call in calls : calls_d [ str ( call ) ] = idx idx = idx + 1 return result | Integrate value and gas temporary arguments to call instruction |
36,832 | def propagate_type_and_convert_call ( result , node ) : calls_value = { } calls_gas = { } call_data = [ ] idx = 0 while idx < len ( result ) : ins = result [ idx ] if isinstance ( ins , TmpCall ) : new_ins = extract_tmp_call ( ins , node . function . contract ) if new_ins : new_ins . set_node ( ins . node ) ins = new_ins result [ idx ] = ins if isinstance ( ins , Argument ) : if ins . get_type ( ) in [ ArgumentType . GAS ] : assert not ins . call_id in calls_gas calls_gas [ ins . call_id ] = ins . argument elif ins . get_type ( ) in [ ArgumentType . VALUE ] : assert not ins . call_id in calls_value calls_value [ ins . call_id ] = ins . argument else : assert ins . get_type ( ) == ArgumentType . CALL call_data . append ( ins . argument ) if isinstance ( ins , ( HighLevelCall , NewContract , InternalDynamicCall ) ) : if ins . call_id in calls_value : ins . call_value = calls_value [ ins . call_id ] if ins . call_id in calls_gas : ins . call_gas = calls_gas [ ins . call_id ] if isinstance ( ins , ( Call , NewContract , NewStructure ) ) : ins . arguments = call_data call_data = [ ] if is_temporary ( ins ) : del result [ idx ] continue new_ins = propagate_types ( ins , node ) if new_ins : if isinstance ( new_ins , ( list , ) ) : if len ( new_ins ) == 2 : new_ins [ 0 ] . set_node ( ins . node ) new_ins [ 1 ] . set_node ( ins . node ) del result [ idx ] result . insert ( idx , new_ins [ 0 ] ) result . insert ( idx + 1 , new_ins [ 1 ] ) idx = idx + 1 else : assert len ( new_ins ) == 3 new_ins [ 0 ] . set_node ( ins . node ) new_ins [ 1 ] . set_node ( ins . node ) new_ins [ 2 ] . set_node ( ins . node ) del result [ idx ] result . insert ( idx , new_ins [ 0 ] ) result . insert ( idx + 1 , new_ins [ 1 ] ) result . insert ( idx + 2 , new_ins [ 2 ] ) idx = idx + 2 else : new_ins . set_node ( ins . node ) result [ idx ] = new_ins idx = idx + 1 return result | Propagate the types variables and convert tmp call to real call operation |
36,833 | def convert_to_push ( ir , node ) : lvalue = ir . lvalue if isinstance ( ir . arguments [ 0 ] , list ) : ret = [ ] val = TemporaryVariable ( node ) operation = InitArray ( ir . arguments [ 0 ] , val ) ret . append ( operation ) ir = Push ( ir . destination , val ) length = Literal ( len ( operation . init_values ) ) t = operation . init_values [ 0 ] . type ir . lvalue . set_type ( ArrayType ( t , length ) ) ret . append ( ir ) if lvalue : length = Length ( ir . array , lvalue ) length . lvalue . points_to = ir . lvalue ret . append ( length ) return ret ir = Push ( ir . destination , ir . arguments [ 0 ] ) if lvalue : ret = [ ] ret . append ( ir ) length = Length ( ir . array , lvalue ) length . lvalue . points_to = ir . lvalue ret . append ( length ) return ret return ir | Convert a call to a PUSH operaiton |
36,834 | def get_type ( t ) : if isinstance ( t , UserDefinedType ) : if isinstance ( t . type , Contract ) : return 'address' return str ( t ) | Convert a type to a str If the instance is a Contract return address instead |
36,835 | def find_references_origin ( irs ) : for ir in irs : if isinstance ( ir , ( Index , Member ) ) : ir . lvalue . points_to = ir . variable_left | Make lvalue of each Index Member operation points to the left variable |
36,836 | def apply_ir_heuristics ( irs , node ) : irs = integrate_value_gas ( irs ) irs = propagate_type_and_convert_call ( irs , node ) irs = remove_unused ( irs ) find_references_origin ( irs ) return irs | Apply a set of heuristic to improve slithIR |
36,837 | def return_type ( self ) : returns = self . returns if returns : return [ r . type for r in returns ] return None | Return the list of return type If no return return None |
36,838 | def all_solidity_variables_read ( self ) : if self . _all_solidity_variables_read is None : self . _all_solidity_variables_read = self . _explore_functions ( lambda x : x . solidity_variables_read ) return self . _all_solidity_variables_read | recursive version of solidity_read |
36,839 | def all_state_variables_written ( self ) : if self . _all_state_variables_written is None : self . _all_state_variables_written = self . _explore_functions ( lambda x : x . state_variables_written ) return self . _all_state_variables_written | recursive version of variables_written |
36,840 | def all_internal_calls ( self ) : if self . _all_internals_calls is None : self . _all_internals_calls = self . _explore_functions ( lambda x : x . internal_calls ) return self . _all_internals_calls | recursive version of internal_calls |
36,841 | def all_low_level_calls ( self ) : if self . _all_low_level_calls is None : self . _all_low_level_calls = self . _explore_functions ( lambda x : x . low_level_calls ) return self . _all_low_level_calls | recursive version of low_level calls |
36,842 | def all_high_level_calls ( self ) : if self . _all_high_level_calls is None : self . _all_high_level_calls = self . _explore_functions ( lambda x : x . high_level_calls ) return self . _all_high_level_calls | recursive version of high_level calls |
36,843 | def all_library_calls ( self ) : if self . _all_library_calls is None : self . _all_library_calls = self . _explore_functions ( lambda x : x . library_calls ) return self . _all_library_calls | recursive version of library calls |
36,844 | def all_conditional_state_variables_read ( self , include_loop = True ) : if include_loop : if self . _all_conditional_state_variables_read_with_loop is None : self . _all_conditional_state_variables_read_with_loop = self . _explore_functions ( lambda x : self . _explore_func_cond_read ( x , include_loop ) ) return self . _all_conditional_state_variables_read_with_loop else : if self . _all_conditional_state_variables_read is None : self . _all_conditional_state_variables_read = self . _explore_functions ( lambda x : self . _explore_func_cond_read ( x , include_loop ) ) return self . _all_conditional_state_variables_read | Return the state variable used in a condition |
36,845 | def all_conditional_solidity_variables_read ( self , include_loop = True ) : if include_loop : if self . _all_conditional_solidity_variables_read_with_loop is None : self . _all_conditional_solidity_variables_read_with_loop = self . _explore_functions ( lambda x : self . _explore_func_conditional ( x , self . _solidity_variable_in_binary , include_loop ) ) return self . _all_conditional_solidity_variables_read_with_loop else : if self . _all_conditional_solidity_variables_read is None : self . _all_conditional_solidity_variables_read = self . _explore_functions ( lambda x : self . _explore_func_conditional ( x , self . _solidity_variable_in_binary , include_loop ) ) return self . _all_conditional_solidity_variables_read | Return the Soldiity variables directly used in a condtion |
36,846 | def all_solidity_variables_used_as_args ( self ) : if self . _all_solidity_variables_used_as_args is None : self . _all_solidity_variables_used_as_args = self . _explore_functions ( lambda x : self . _explore_func_nodes ( x , self . _solidity_variable_in_internal_calls ) ) return self . _all_solidity_variables_used_as_args | Return the Solidity variables directly used in a call |
36,847 | def is_protected ( self ) : if self . is_constructor : return True conditional_vars = self . all_conditional_solidity_variables_read ( include_loop = False ) args_vars = self . all_solidity_variables_used_as_args ( ) return SolidityVariableComposed ( 'msg.sender' ) in conditional_vars + args_vars | Determine if the function is protected using a check on msg . sender |
36,848 | def auth_string ( self ) : username_token = '{username}:{token}' . format ( username = self . username , token = self . token ) b64encoded_string = b64encode ( username_token ) auth_string = 'Token {b64}' . format ( b64 = b64encoded_string ) return auth_string | Authenticate based on username and token which is base64 - encoded |
36,849 | def api_related ( self , query ) : url = "{0}/{1}/related/?format=json" . format ( self . base_url , query ) response = requests . get ( url , headers = self . headers , verify = self . verify_ssl ) if response . status_code == 200 : return response . json ( ) else : self . error ( 'Received status code: {0} from Soltra Server. Content:\n{1}' . format ( response . status_code , response . text ) ) | Find related objects through SoltraEdge API |
36,850 | def tlp_classifiers ( self , name_tlp , val_tlp ) : classifier = { "WHITE" : 0 , "GREEN" : 1 , "AMBER" : 2 , "RED" : 3 } valid = True if classifier [ name_tlp ] > val_tlp : valid = False return valid | Classifier between Cortex and Soltra . Soltra uses name - TLP and Cortex value - TLP |
36,851 | def pop_object ( self , element ) : redacted_text = "Redacted. Object contained TLP value higher than allowed." element [ 'id' ] = '' element [ 'url' ] = '' element [ 'type' ] = '' element [ 'tags' ] = [ ] element [ 'etlp' ] = None element [ 'title' ] = redacted_text element [ 'tlpColor' ] = element [ 'tlpColor' ] element [ 'uploaded_on' ] = '' element [ 'uploaded_by' ] = '' element [ 'description' ] = redacted_text element [ 'children_types' ] = [ ] element [ 'summary' ] [ 'type' ] = '' element [ 'summary' ] [ 'value' ] = '' element [ 'summary' ] [ 'title' ] = redacted_text element [ 'summary' ] [ 'description' ] = redacted_text return element | Pop the object element if the object contains an higher TLP then allowed . |
36,852 | def __query ( domain , limit = 100 ) : s = check_output ( [ '{}' . format ( os . path . join ( os . path . dirname ( __file__ ) , 'whois.sh' ) ) , '--limit {} {}' . format ( limit , domain ) ] , universal_newlines = True ) return s | Using the shell script to query pdns . cert . at is a hack but python raises an error every time using subprocess functions to call whois . So this hack is avoiding calling whois directly . Ugly but works . |
36,853 | def analyze_vba ( self , path ) : try : vba_parser = VBA_Parser_CLI ( path , relaxed = True ) vbaparser_result = vba_parser . process_file_json ( show_decoded_strings = True , display_code = True , hide_attributes = False , vba_code_only = False , show_deobfuscated_code = True , deobfuscate = True ) self . add_result_subsection ( 'Olevba' , vbaparser_result ) except TypeError : self . add_result_subsection ( 'Oletools VBA Analysis failed' , 'Analysis failed due to an filetype error.' 'The file does not seem to be a valid MS-Office ' 'file.' ) | Analyze a given sample for malicious vba . |
36,854 | def get ( self , ip_address ) : address = ipaddress . ip_address ( ip_address ) if address . version == 6 and self . _metadata . ip_version == 4 : raise ValueError ( 'Error looking up {0}. You attempted to look up ' 'an IPv6 address in an IPv4-only database.' . format ( ip_address ) ) pointer = self . _find_address_in_tree ( address ) return self . _resolve_data_pointer ( pointer ) if pointer else None | Return the record for the ip_address in the MaxMind DB |
36,855 | def search ( self , domain , wildcard = True ) : base_url = "https://crt.sh/?q={}&output=json" if wildcard : domain = "%25.{}" . format ( domain ) url = base_url . format ( domain ) ua = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1' req = requests . get ( url , headers = { 'User-Agent' : ua } ) if req . ok : try : content = req . content . decode ( 'utf-8' ) data = json . loads ( content . replace ( '}{' , '},{' ) ) return data except Exception : self . error ( "Error retrieving information." ) return None | Search crt . sh for the given domain . |
36,856 | def __search ( self , value , type_attribute ) : results = [ ] if not value : raise EmptySearchtermError for idx , connection in enumerate ( self . misp_connections ) : misp_response = connection . search ( type_attribute = type_attribute , values = value ) if isinstance ( self . misp_name , list ) : name = self . misp_name [ idx ] else : name = self . misp_name results . append ( { 'url' : connection . root_url , 'name' : name , 'result' : self . __clean ( misp_response ) } ) return results | Search method call wrapper . |
36,857 | def search_tor_node ( self , data_type , data ) : results = [ ] if data_type == 'ip' : results = self . _get_node_from_ip ( data ) elif data_type == 'fqdn' : results = self . _get_node_from_fqdn ( data ) elif data_type == 'domain' : results = self . _get_node_from_domain ( data ) else : pass return { "nodes" : results } | Lookup an artifact to check if it is a known tor exit node . |
36,858 | def check ( self , file ) : result = [ ] for rule in self . ruleset : matches = rule . match ( file ) for match in matches : result . append ( str ( match ) ) return result | Checks a given file against all available yara rules |
36,859 | def query ( self , domain ) : result = { } try : result = self . pdns . query ( domain ) except : self . error ( 'Exception while querying passiveDNS. Check the domain format.' ) clean_result = [ ] for ind , resultset in enumerate ( result ) : if resultset . get ( 'time_first' , None ) : resultset [ 'time_first' ] = resultset . get ( 'time_first' ) . isoformat ( ' ' ) if resultset . get ( 'time_last' , None ) : resultset [ 'time_last' ] = resultset . get ( 'time_last' ) . isoformat ( ' ' ) clean_result . append ( resultset ) return clean_result | The actual query happens here . Time from queries is replaced with isoformat . |
36,860 | def query_ip ( self , ip ) : try : result = self . pssl . query ( ip ) except : self . error ( 'Exception during processing with passiveSSL. ' 'Please check the format of ip.' ) if not result . get ( ip , None ) : certificates = [ ] else : certificates = list ( result . get ( ip ) . get ( 'certificates' ) ) newresult = { 'ip' : ip , 'certificates' : [ ] } for cert in certificates : newresult [ 'certificates' ] . append ( { 'fingerprint' : cert , 'subject' : result . get ( ip ) . get ( 'subjects' ) . get ( cert ) . get ( 'values' ) [ 0 ] } ) return newresult | Queries Circl . lu Passive SSL for an ip using PyPSSL class . Returns error if nothing is found . |
36,861 | def query_certificate ( self , cert_hash ) : try : cquery = self . pssl . query_cert ( cert_hash ) except Exception : self . error ( 'Exception during processing with passiveSSL. ' 'This happens if the given hash is not sha1 or contains dashes/colons etc. ' 'Please make sure to submit a clean formatted sha1 hash.' ) try : cfetch = self . pssl . fetch_cert ( cert_hash , make_datetime = False ) except Exception : cfetch = { } return { 'query' : cquery , 'cert' : cfetch } | Queries Circl . lu Passive SSL for a certificate hash using PyPSSL class . Returns error if nothing is found . |
36,862 | def _get_level ( current_level , new_intention ) : intention_level_map = OrderedDict ( [ ( 'info' , 'info' ) , ( 'benign' , 'safe' ) , ( 'suspicious' , 'suspicious' ) , ( 'malicious' , 'malicious' ) ] ) levels = intention_level_map . values ( ) new_level = intention_level_map . get ( new_intention , 'info' ) new_index = levels . index ( new_level ) try : current_index = levels . index ( current_level ) except ValueError : current_index = - 1 return new_level if new_index > current_index else current_level | Map GreyNoise intentions to Cortex maliciousness levels . Accept a Cortex level and a GreyNoise intention the return the more malicious of the two . |
36,863 | def summary ( self , raw ) : try : taxonomies = [ ] if raw . get ( 'records' ) : final_level = None taxonomy_data = defaultdict ( int ) for record in raw . get ( 'records' , [ ] ) : name = record . get ( 'name' , 'unknown' ) intention = record . get ( 'intention' , 'unknown' ) taxonomy_data [ name ] += 1 final_level = self . _get_level ( final_level , intention ) if len ( taxonomy_data ) > 1 : taxonomies . append ( self . build_taxonomy ( final_level , 'GreyNoise' , 'entries' , len ( taxonomy_data ) ) ) else : for name , count in taxonomy_data . iteritems ( ) : taxonomies . append ( self . build_taxonomy ( final_level , 'GreyNoise' , name , count ) ) else : taxonomies . append ( self . build_taxonomy ( 'info' , 'GreyNoise' , 'Records' , 'None' ) ) return { "taxonomies" : taxonomies } except Exception as e : self . error ( 'Summary failed\n{}' . format ( e . message ) ) | Return one taxonomy summarizing the reported tags If there is only one tag use it as the predicate If there are multiple tags use entries as the predicate Use the total count as the value Use the most malicious level found |
36,864 | def scan_file ( self , this_file ) : params = { 'apikey' : self . api_key } try : if type ( this_file ) == str and os . path . isfile ( this_file ) : files = { 'file' : ( this_file , open ( this_file , 'rb' ) ) } elif isinstance ( this_file , StringIO . StringIO ) : files = { 'file' : this_file . read ( ) } else : files = { 'file' : this_file } except TypeError as e : return dict ( error = e . message ) try : response = requests . post ( self . base + 'file/scan' , files = files , params = params , proxies = self . proxies ) except requests . RequestException as e : return dict ( error = e . message ) return _return_response_and_status_code ( response ) | Submit a file to be scanned by VirusTotal |
36,865 | def scan_url ( self , this_url ) : params = { 'apikey' : self . api_key , 'url' : this_url } try : response = requests . post ( self . base + 'url/scan' , params = params , proxies = self . proxies ) except requests . RequestException as e : return dict ( error = e . message ) return _return_response_and_status_code ( response ) | Submit a URL to be scanned by VirusTotal . |
36,866 | def get_file ( self , this_hash ) : params = { 'apikey' : self . api_key , 'hash' : this_hash } try : response = requests . get ( self . base + 'file/download' , params = params , proxies = self . proxies ) except requests . RequestException as e : return dict ( error = e . message ) if response . status_code == requests . codes . ok : return response . content elif response . status_code == 403 : return dict ( error = 'You tried to perform calls to functions for which you require a Private API key.' , response_code = response . status_code ) elif response . status_code == 404 : return dict ( error = 'File not found.' , response_code = response . status_code ) else : return dict ( response_code = response . status_code ) | Download a file by its hash . |
36,867 | def get_url_report ( self , this_url , scan = '0' , allinfo = 1 ) : params = { 'apikey' : self . api_key , 'resource' : this_url , 'scan' : scan , 'allinfo' : allinfo } try : response = requests . get ( self . base + 'url/report' , params = params , proxies = self . proxies ) except requests . RequestException as e : return dict ( error = e . message ) return _return_response_and_status_code ( response ) | Get the scan results for a URL . |
36,868 | def get_comments ( self , resource , before = None ) : params = dict ( apikey = self . api_key , resource = resource , before = before ) try : response = requests . get ( self . base + 'comments/get' , params = params , proxies = self . proxies ) except requests . RequestException as e : return dict ( error = e . message ) return _return_response_and_status_code ( response ) | Get comments for a file or URL . |
36,869 | def save_downloaded_file ( filename , save_file_at , file_stream ) : filename = os . path . join ( save_file_at , filename ) with open ( filename , 'wb' ) as f : f . write ( file_stream ) f . flush ( ) | Save Downloaded File to Disk Helper Function |
36,870 | def name ( self ) : return next ( ( self . names . get ( x ) for x in self . _locales if x in self . names ) , None ) | Dict with locale codes as keys and localized name as value |
36,871 | def summary ( self , raw ) : taxonomies = [ ] level = "info" namespace = "Patrowl" if self . service == 'getreport' : if 'risk_level' in raw and raw [ 'risk_level' ] : risk_level = raw [ 'risk_level' ] if risk_level [ 'grade' ] in [ "A" , "B" ] : level = "safe" else : level = "suspicious" taxonomies . append ( self . build_taxonomy ( level , namespace , "Grade" , risk_level [ 'grade' ] ) ) if risk_level [ 'high' ] > 0 : level = "malicious" elif risk_level [ 'medium' ] > 0 or risk_level [ 'low' ] > 0 : level = "suspicious" else : level = "info" taxonomies . append ( self . build_taxonomy ( level , namespace , "Findings" , "{}/{}/{}/{}" . format ( risk_level [ 'high' ] , risk_level [ 'medium' ] , risk_level [ 'low' ] , risk_level [ 'info' ] ) ) ) return { "taxonomies" : taxonomies } | Parse format and return scan summary . |
36,872 | def run ( self ) : try : if self . service == 'getreport' : service_url = '{}/assets/api/v1/details/{}' . format ( self . url , self . get_data ( ) ) headers = { 'Authorization' : 'token {}' . format ( self . api_key ) } response = requests . get ( service_url , headers = headers ) self . report ( response . json ( ) ) else : self . error ( 'Unknown Patrowl service' ) except Exception as e : self . unexpectedError ( e ) | Run the analyzer . |
36,873 | def run ( self ) : kwargs = { 'query' : self . get_data ( ) } if self . data_type == "ip" : kwargs . update ( { 'query_type' : 'ip' } ) elif self . data_type == "network" : kwargs . update ( { 'query_type' : 'network' } ) elif self . data_type == 'autonomous-system' : kwargs . update ( { 'query_type' : 'asn' } ) elif self . data_type == 'port' : kwargs . update ( { 'query_type' : 'port' } ) else : self . notSupported ( ) return False if self . service == 'observations' : response = self . bs . get_observations ( ** kwargs ) self . report ( response ) elif self . service == 'enrichment' : response = self . bs . enrich ( ** kwargs ) self . report ( response ) else : self . report ( { 'error' : 'Invalid service defined.' } ) | Run the process to get observation data from Backscatter . io . |
36,874 | def summary ( self , raw ) : taxonomies = list ( ) level = 'info' namespace = 'Backscatter.io' if self . service == 'observations' : summary = raw . get ( 'results' , dict ( ) ) . get ( 'summary' , dict ( ) ) taxonomies = taxonomies + [ self . build_taxonomy ( level , namespace , 'Observations' , summary . get ( 'observations_count' , 0 ) ) , self . build_taxonomy ( level , namespace , 'IP Addresses' , summary . get ( 'ip_address_count' , 0 ) ) , self . build_taxonomy ( level , namespace , 'Networks' , summary . get ( 'network_count' , 0 ) ) , self . build_taxonomy ( level , namespace , 'AS' , summary . get ( 'autonomous_system_count' , 0 ) ) , self . build_taxonomy ( level , namespace , 'Ports' , summary . get ( 'port_count' , 0 ) ) , self . build_taxonomy ( level , namespace , 'Protocols' , summary . get ( 'protocol_count' , 0 ) ) ] elif self . service == 'enrichment' : summary = raw . get ( 'results' , dict ( ) ) if self . data_type == 'ip' : taxonomies = taxonomies + [ self . build_taxonomy ( level , namespace , 'Network' , summary . get ( 'network' ) ) , self . build_taxonomy ( level , namespace , 'Network Broadcast' , summary . get ( 'network_broadcast' ) ) , self . build_taxonomy ( level , namespace , 'Network Size' , summary . get ( 'network_size' ) ) , self . build_taxonomy ( level , namespace , 'Country' , summary . get ( 'country_name' ) ) , self . build_taxonomy ( level , namespace , 'AS Number' , summary . get ( 'as_num' ) ) , self . build_taxonomy ( level , namespace , 'AS Name' , summary . get ( 'as_name' ) ) , ] elif self . data_type == 'network' : taxonomies = taxonomies + [ self . build_taxonomy ( level , namespace , 'Network Size' , summary . get ( 'network_size' ) ) ] elif self . data_type == 'autonomous-system' : taxonomies = taxonomies + [ self . build_taxonomy ( level , namespace , 'Prefix Count' , summary . get ( 'prefix_count' ) ) , self . build_taxonomy ( level , namespace , 'AS Number' , summary . get ( 'as_num' ) ) , self . 
build_taxonomy ( level , namespace , 'AS Name' , summary . get ( 'as_name' ) ) ] elif self . data_type == 'port' : for result in raw . get ( 'results' , list ( ) ) : display = "%s (%s)" % ( result . get ( 'service' ) , result . get ( 'protocol' ) ) taxonomies . append ( self . build_taxonomy ( level , namespace , 'Service' , display ) ) else : pass else : pass return { "taxonomies" : taxonomies } | Use the Backscatter . io summary data to create a view . |
36,875 | def decode ( self , offset ) : new_offset = offset + 1 ( ctrl_byte , ) = struct . unpack ( b'!B' , self . _buffer [ offset : new_offset ] ) type_num = ctrl_byte >> 5 if not type_num : ( type_num , new_offset ) = self . _read_extended ( new_offset ) ( size , new_offset ) = self . _size_from_ctrl_byte ( ctrl_byte , new_offset , type_num ) return self . _type_decoder [ type_num ] ( self , size , new_offset ) | Decode a section of the data section starting at offset |
36,876 | def get_sample ( self , samplehash ) : apiurl = '/rest/sample/' if len ( samplehash ) == 32 : apiurl += 'md5/' elif len ( samplehash ) == 40 : apiurl += 'sha1/' elif len ( samplehash ) == 64 : apiurl += 'sha256/' else : raise UnknownHashTypeError ( 'Sample hash has an unknown length.' ) res = self . session . get ( self . url + apiurl + samplehash ) if res . status_code == 200 : return json . loads ( res . text ) else : raise BadResponseError ( 'Response from VMRay was not HTTP 200.' ' Responsecode: {}; Text: {}' . format ( res . status_code , res . text ) ) | Downloads information about a sample using a given hash . |
36,877 | def submit_sample ( self , filepath , filename , tags = [ 'TheHive' ] ) : apiurl = '/rest/sample/submit?sample_file' params = { 'sample_filename_b64enc' : base64 . b64encode ( filename . encode ( 'utf-8' ) ) , 'reanalyze' : self . reanalyze } if tags : params [ 'tags' ] = ',' . join ( tags ) if os . path . isfile ( filepath ) : res = self . session . post ( url = self . url + apiurl , files = [ ( 'sample_file' , open ( filepath , mode = 'rb' ) ) ] , params = params ) if res . status_code == 200 : return json . loads ( res . text ) else : raise BadResponseError ( 'Response from VMRay was not HTTP 200.' ' Responsecode: {}; Text: {}' . format ( res . status_code , res . text ) ) else : raise SampleFileNotFoundError ( 'Given sample file was not found.' ) | Uploads a new sample to VMRay api . Filename gets sent base64 encoded . |
36,878 | def build_results ( self , results ) : self . add_result_subsection ( 'Exploit mitigation techniques' , { 'level' : results . get ( 'Plugins' , { } ) . get ( 'mitigation' , { } ) . get ( 'level' , None ) , 'summary' : results . get ( 'Plugins' , { } ) . get ( 'mitigation' , { } ) . get ( 'summary' , None ) , 'content' : results . get ( 'Plugins' , { } ) . get ( 'mitigation' , { } ) . get ( 'plugin_output' , None ) } ) self . add_result_subsection ( 'Suspicious strings' , { 'level' : results . get ( 'Plugins' , { } ) . get ( 'strings' , { } ) . get ( 'level' , None ) , 'summary' : results . get ( 'Plugins' , { } ) . get ( 'strings' , { } ) . get ( 'summary' , None ) , 'content' : results . get ( 'Plugins' , { } ) . get ( 'strings' , { } ) . get ( 'plugin_output' , None ) } ) self . add_result_subsection ( 'Suspicious imports' , { 'level' : results . get ( 'Plugins' , { } ) . get ( 'imports' , { } ) . get ( 'level' , None ) , 'summary' : results . get ( 'Plugins' , { } ) . get ( 'imports' , { } ) . get ( 'summary' , None ) , 'content' : results . get ( 'Plugins' , { } ) . get ( 'imports' , { } ) . get ( 'plugin_output' , None ) } ) self . add_result_subsection ( 'Packer' , { 'level' : results . get ( 'Plugins' , { } ) . get ( 'packer' , { } ) . get ( 'level' , None ) , 'summary' : results . get ( 'Plugins' , { } ) . get ( 'packer' , { } ) . get ( 'summary' , None ) , 'content' : results . get ( 'Plugins' , { } ) . get ( 'packer' , { } ) . get ( 'plugin_output' , None ) } ) self . add_result_subsection ( 'Clamav' , { 'level' : results . get ( 'Plugins' , { } ) . get ( 'clamav' , { } ) . get ( 'level' , None ) , 'summary' : results . get ( 'Plugins' , { } ) . get ( 'clamav' , { } ) . get ( 'summary' , None ) , 'content' : results . get ( 'Plugins' , { } ) . get ( 'clamav' , { } ) . get ( 'plugin_output' , None ) } ) self . add_result_subsection ( 'Manalyze raw output' , json . dumps ( results , indent = 4 ) ) | Properly format the results |
36,879 | def v4_int_to_packed ( address ) : if address > _BaseV4 . _ALL_ONES : raise ValueError ( 'Address too large for IPv4' ) return Bytes ( struct . pack ( '!I' , address ) ) | The binary representation of this address . |
36,880 | def _get_prefix_length ( number1 , number2 , bits ) : for i in range ( bits ) : if number1 >> i == number2 >> i : return bits - i return 0 | Get the number of leading bits that are same for two numbers . |
36,881 | def _prefix_from_ip_int ( self , ip_int ) : prefixlen = self . _max_prefixlen while prefixlen : if ip_int & 1 : break ip_int >>= 1 prefixlen -= 1 if ip_int == ( 1 << prefixlen ) - 1 : return prefixlen else : raise NetmaskValueError ( 'Bit pattern does not match /1*0*/' ) | Return prefix length from a bitwise netmask . |
36,882 | def _prefix_from_prefix_string ( self , prefixlen_str ) : try : if not _BaseV4 . _DECIMAL_DIGITS . issuperset ( prefixlen_str ) : raise ValueError prefixlen = int ( prefixlen_str ) if not ( 0 <= prefixlen <= self . _max_prefixlen ) : raise ValueError except ValueError : raise NetmaskValueError ( '%s is not a valid prefix length' % prefixlen_str ) return prefixlen | Turn a prefix length string into an integer . |
36,883 | def masked ( self ) : return IPNetwork ( '%s/%d' % ( self . network , self . _prefixlen ) , version = self . _version ) | Return the network object with the host bits masked out . |
36,884 | def _string_from_ip_int ( self , ip_int = None ) : if not ip_int and ip_int != 0 : ip_int = int ( self . _ip ) if ip_int > self . _ALL_ONES : raise ValueError ( 'IPv6 address is too large' ) hex_str = '%032x' % ip_int hextets = [ ] for x in range ( 0 , 32 , 4 ) : hextets . append ( '%x' % int ( hex_str [ x : x + 4 ] , 16 ) ) hextets = self . _compress_hextets ( hextets ) return ':' . join ( hextets ) | Turns a 128 - bit integer into hexadecimal notation . |
36,885 | def scan_file ( self , this_file , this_filename ) : params = { 'api_key' : self . api_key , 'filename' : this_filename } try : files = { 'file' : ( this_file . name , open ( this_file . name , 'rb' ) , 'application/octet-stream' ) } except TypeError as e : return dict ( error = e . message ) try : response = requests . post ( self . base + 'file/upload' , files = files , data = params ) except requests . RequestException as e : return dict ( error = e . message ) return _return_response_and_status_code ( response ) | Submit a file to be scanned by Malwares |
36,886 | def __prepare_body ( self , search_value , search_type = 'url' ) : body = { 'client' : { 'clientId' : self . client_id , 'clientVersion' : self . client_version } } if search_type == 'url' : data = { 'threatTypes' : [ 'MALWARE' , 'SOCIAL_ENGINEERING' , 'UNWANTED_SOFTWARE' , 'POTENTIALLY_HARMFUL_APPLICATION' ] , 'platformTypes' : [ 'ANY_PLATFORM' , 'ALL_PLATFORMS' , 'WINDOWS' , 'LINUX' , 'OSX' , 'ANDROID' , 'IOS' ] , 'threatEntryTypes' : [ 'URL' ] } elif search_type == 'ip' : data = { 'threatTypes' : [ 'MALWARE' ] , 'platformTypes' : [ 'WINDOWS' , 'LINUX' , 'OSX' ] , 'threatEntryTypes' : [ 'IP_RANGE' ] } else : raise SearchTypeNotSupportedError ( 'Currently supported search types are \'url\' and \'ip\'.' ) data [ 'threatEntries' ] = [ { 'url' : search_value } ] body [ 'threatInfo' ] = data return body | Prepares the http body for querying safebrowsing api . Maybe the list need to get adjusted . |
36,887 | def query_rpdns ( self ) : results = requests . get ( 'https://freeapi.robtex.com/pdns/reverse/{}' . format ( self . get_data ( ) ) ) . text . split ( '\r\n' ) jsonresults = [ ] for idx , r in enumerate ( results ) : if len ( r ) > 0 : jsonresults . append ( json . loads ( r ) ) return jsonresults | Queries robtex reverse pdns - api using an ip as parameter |
36,888 | def module_summary ( self ) : suspicious = 0 malicious = 0 count = 0 cve = False taxonomies = [ ] for section in self . results : if section [ 'submodule_section_content' ] [ 'class' ] == 'malicious' : malicious += 1 elif section [ 'submodule_section_content' ] [ 'class' ] == 'suspicious' : suspicious += 1 if 'CVE' in section [ 'submodule_section_content' ] [ 'clsid_description' ] : cve = True count += 1 if malicious > 0 : taxonomies . append ( self . build_taxonomy ( 'malicious' , 'FileInfo' , 'MaliciousRTFObjects' , malicious ) ) if suspicious > 0 : taxonomies . append ( self . build_taxonomy ( 'suspicious' , 'FileInfo' , 'SuspiciousRTFObjects' , suspicious ) ) if cve : taxonomies . append ( self . build_taxonomy ( 'malicious' , 'FileInfo' , 'PossibleCVEExploit' , 'True' ) ) taxonomies . append ( self . build_taxonomy ( 'info' , 'FileInfo' , 'RTFObjects' , count ) ) self . summary [ 'taxonomies' ] = taxonomies return self . summary | Count the malicious and suspicious sections check for CVE description |
36,889 | def search_tor_node ( self , ip ) : data = { } tmp = { } present = datetime . utcnow ( ) . replace ( tzinfo = pytz . utc ) for line in self . _get_raw_data ( ) . splitlines ( ) : params = line . split ( ' ' ) if params [ 0 ] == 'ExitNode' : tmp [ 'node' ] = params [ 1 ] elif params [ 0 ] == 'ExitAddress' : tmp [ 'last_status' ] = params [ 2 ] + 'T' + params [ 3 ] + '+0000' last_status = parse ( tmp [ 'last_status' ] ) if ( self . delta is None or ( present - last_status ) < self . delta ) : data [ params [ 1 ] ] = tmp tmp = { } else : pass return data . get ( ip , { } ) | Lookup an IP address to check if it is a known tor exit node . |
36,890 | def search_hosts ( self , ip ) : c = CensysIPv4 ( api_id = self . __uid , api_secret = self . __api_key ) return c . view ( ip ) | Searches for a host using its ipv4 address |
36,891 | def search_certificate ( self , hash ) : c = CensysCertificates ( api_id = self . __uid , api_secret = self . __api_key ) return c . view ( hash ) | Searches for a specific certificate using its hash |
36,892 | def get_data ( self , datatype , data ) : result = { } params = StopforumspamClient . _set_payload ( datatype , data ) response = self . client . get ( 'https://api.stopforumspam.org/api' , params = params , proxies = self . proxies ) response . raise_for_status ( ) report = response . json ( ) if report [ 'success' ] : data = report [ StopforumspamClient . _type_conversion [ datatype ] ] result = self . _data_conversion ( data ) else : pass return result | Look for an IP address or an email address in the spammer database . |
36,893 | def construct ( cls , faker , path_to_factories = None ) : factory = faker . __class__ ( ) if path_to_factories is not None and os . path . isdir ( path_to_factories ) : for filename in os . listdir ( path_to_factories ) : if os . path . isfile ( filename ) : cls . _resolve ( path_to_factories , filename ) return factory | Create a new factory container . |
36,894 | def define ( self , klass , name = "default" ) : def decorate ( func ) : @ wraps ( func ) def wrapped ( * args , ** kwargs ) : return func ( * args , ** kwargs ) self . register ( klass , func , name = name ) return wrapped return decorate | Define a class with a given set of attributes . |
36,895 | def create_as ( self , klass , name , ** attributes ) : return self . of ( klass , name ) . create ( ** attributes ) | Create an instance of the given model and type and persist it to the database . |
36,896 | def make_as ( self , klass , name , ** attributes ) : return self . of ( klass , name ) . make ( ** attributes ) | Create an instance of the given model and type . |
36,897 | def of ( self , klass , name = "default" ) : return FactoryBuilder ( klass , name , self . _definitions , self . _faker , self . _resolver ) | Create a builder for the given model . |
36,898 | def build ( self , klass , name = "default" , amount = None ) : if amount is None : if isinstance ( name , int ) : amount = name name = "default" else : amount = 1 return self . of ( klass , name ) . times ( amount ) | Makes a factory builder with a specified amount . |
36,899 | def _get_renamed_diff ( self , blueprint , command , column , schema ) : table_diff = self . _get_table_diff ( blueprint , schema ) return self . _set_renamed_columns ( table_diff , command , column ) | Get a new column instance with the new column name . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.