idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
245,500
def full_address ( self ) : addr = "" # if self.building: # addr = addr + "(" + self.building + ") " if self . house_number : addr = addr + self . house_number if self . street_prefix : addr = addr + " " + self . street_prefix if self . street : addr = addr + " " + self . street if self . street_suffix : addr = addr + " " + self . street_suffix if self . apartment : addr = addr + " " + self . apartment if self . city : addr = addr + ", " + self . city if self . state : addr = addr + ", " + self . state if self . zip : addr = addr + " " + self . zip return addr
Print the address in a human readable format
165
8
245,501
def _get_dstk_intersections ( self , address , dstk_address ) : # Normalize both addresses normalized_address = self . _normalize ( address ) normalized_dstk_address = self . _normalize ( dstk_address ) address_uniques = set ( normalized_address ) - set ( normalized_dstk_address ) dstk_address_uniques = set ( normalized_dstk_address ) - set ( normalized_address ) if self . logger : self . logger . debug ( "Address Uniques {0}" . format ( address_uniques ) ) if self . logger : self . logger . debug ( "DSTK Address Uniques {0}" . format ( dstk_address_uniques ) ) return ( len ( address_uniques ) , len ( dstk_address_uniques ) )
Find the unique tokens in the original address and the returned address .
185
13
245,502
def _normalize ( self , address ) : normalized_address = [ ] if self . logger : self . logger . debug ( "Normalizing Address: {0}" . format ( address ) ) for token in address . split ( ) : if token . upper ( ) in self . parser . suffixes . keys ( ) : normalized_address . append ( self . parser . suffixes [ token . upper ( ) ] . lower ( ) ) elif token . upper ( ) in self . parser . suffixes . values ( ) : normalized_address . append ( token . lower ( ) ) elif token . upper ( ) . replace ( '.' , '' ) in self . parser . suffixes . values ( ) : normalized_address . append ( token . lower ( ) . replace ( '.' , '' ) ) elif token . lower ( ) in self . parser . prefixes . keys ( ) : normalized_address . append ( self . parser . prefixes [ token . lower ( ) ] . lower ( ) ) elif token . upper ( ) in self . parser . prefixes . values ( ) : normalized_address . append ( token . lower ( ) [ : - 1 ] ) elif token . upper ( ) + '.' in self . parser . prefixes . values ( ) : normalized_address . append ( token . lower ( ) ) else : normalized_address . append ( token . lower ( ) ) return normalized_address
Normalize prefixes suffixes and other to make matching original to returned easier .
301
16
245,503
def empty ( ) : if not hasattr ( empty , '_instance' ) : empty . _instance = Interval ( AtomicInterval ( OPEN , inf , - inf , OPEN ) ) return empty . _instance
Create an empty set .
45
5
245,504
def from_data ( data , conv = None , pinf = float ( 'inf' ) , ninf = float ( '-inf' ) ) : intervals = [ ] conv = ( lambda v : v ) if conv is None else conv def _convert ( bound ) : if bound == pinf : return inf elif bound == ninf : return - inf else : return conv ( bound ) for item in data : left , lower , upper , right = item intervals . append ( AtomicInterval ( left , _convert ( lower ) , _convert ( upper ) , right ) ) return Interval ( * intervals )
Import an interval from a piece of data .
132
9
245,505
def is_empty ( self ) : return ( self . _lower > self . _upper or ( self . _lower == self . _upper and ( self . _left == OPEN or self . _right == OPEN ) ) )
Test interval emptiness .
48
4
245,506
def to_atomic ( self ) : lower = self . _intervals [ 0 ] . lower left = self . _intervals [ 0 ] . left upper = self . _intervals [ - 1 ] . upper right = self . _intervals [ - 1 ] . right return AtomicInterval ( left , lower , upper , right )
Return the smallest atomic interval containing this interval .
71
9
245,507
def register_graphql_handlers ( app : "Application" , engine_sdl : str = None , engine_schema_name : str = "default" , executor_context : dict = None , executor_http_endpoint : str = "/graphql" , executor_http_methods : List [ str ] = None , engine : Engine = None , subscription_ws_endpoint : Optional [ str ] = None , graphiql_enabled : bool = False , graphiql_options : Optional [ Dict [ str , Any ] ] = None , ) -> "Application" : # pylint: disable=too-many-arguments,too-many-locals if ( not engine_sdl and not engine ) or ( engine and engine_sdl ) : raise Exception ( "an engine OR an engine_sdl should be passed here, not both, not none" ) if not executor_context : executor_context = { } executor_context [ "app" ] = app if not executor_http_methods : executor_http_methods = [ "GET" , "POST" ] if not engine : engine = Engine ( engine_sdl , engine_schema_name ) app [ "ttftt_engine" ] = engine for method in executor_http_methods : try : app . router . add_route ( method , executor_http_endpoint , partial ( getattr ( Handlers , "handle_%s" % method . lower ( ) ) , executor_context , ) , ) except AttributeError : raise Exception ( "Unsupported < %s > http method" % method ) _set_subscription_ws_handler ( app , subscription_ws_endpoint , engine ) _set_graphiql_handler ( app , graphiql_enabled , graphiql_options , executor_http_endpoint , executor_http_methods , subscription_ws_endpoint , ) return app
Register a Tartiflette Engine to an app
432
9
245,508
async def on_shutdown ( app ) : for method in app . get ( "close_methods" , [ ] ) : logger . debug ( "Calling < %s >" , method ) if asyncio . iscoroutinefunction ( method ) : await method ( ) else : method ( )
app SHUTDOWN event handler
64
6
245,509
def _load_from_file ( path ) : config = [ ] try : with open ( path , 'r' ) as config_file : config = yaml . load ( config_file ) [ 'normalizations' ] except EnvironmentError as e : raise ConfigError ( 'Problem while loading file: %s' % e . args [ 1 ] if len ( e . args ) > 1 else e ) except ( TypeError , KeyError ) as e : raise ConfigError ( 'Config file has an unexpected structure: %s' % e ) except yaml . YAMLError : raise ConfigError ( 'Invalid YAML file syntax' ) return config
Load a config file from the given path .
140
9
245,510
def _parse_normalization ( normalization ) : parsed_normalization = None if isinstance ( normalization , dict ) : if len ( normalization . keys ( ) ) == 1 : items = list ( normalization . items ( ) ) [ 0 ] if len ( items ) == 2 : # Two elements tuple # Convert to string if no normalization options if items [ 1 ] and isinstance ( items [ 1 ] , dict ) : parsed_normalization = items else : parsed_normalization = items [ 0 ] elif isinstance ( normalization , STR_TYPE ) : parsed_normalization = normalization return parsed_normalization
Parse a normalization item .
134
7
245,511
def _parse_normalizations ( self , normalizations ) : parsed_normalizations = [ ] if isinstance ( normalizations , list ) : for item in normalizations : normalization = self . _parse_normalization ( item ) if normalization : parsed_normalizations . append ( normalization ) else : raise ConfigError ( 'List expected. Found %s' % type ( normalizations ) ) return parsed_normalizations
Returns a list of parsed normalizations .
89
8
245,512
def initialize_logger ( debug ) : level = logging . DEBUG if debug else logging . INFO logger = logging . getLogger ( 'cucco' ) logger . setLevel ( level ) formatter = logging . Formatter ( '%(asctime)s %(levelname).1s %(message)s' ) console_handler = logging . StreamHandler ( ) console_handler . setLevel ( level ) console_handler . setFormatter ( formatter ) logger . addHandler ( console_handler ) return logger
Set up logger to be used by the library .
112
10
245,513
def batch ( ctx , path , recursive , watch ) : batch = Batch ( ctx . obj [ 'config' ] , ctx . obj [ 'cucco' ] ) if os . path . exists ( path ) : if watch : batch . watch ( path , recursive ) elif os . path . isfile ( path ) : batch . process_file ( path ) else : batch . process_files ( path , recursive ) else : click . echo ( 'Error: Specified path doesn\'t exists' , err = True ) sys . exit ( - 1 )
Normalize files in a path .
122
7
245,514
def normalize ( ctx , text ) : if text : click . echo ( ctx . obj [ 'cucco' ] . normalize ( text ) ) else : for line in sys . stdin : click . echo ( ctx . obj [ 'cucco' ] . normalize ( line ) )
Normalize text or piped input .
67
8
245,515
def cli ( ctx , config , debug , language , verbose ) : ctx . obj = { } try : ctx . obj [ 'config' ] = Config ( normalizations = config , language = language , debug = debug , verbose = verbose ) except ConfigError as e : click . echo ( e . message ) sys . exit ( - 1 ) ctx . obj [ 'cucco' ] = Cucco ( ctx . obj [ 'config' ] )
Cucco allows applying normalizations to a given text or file. These normalizations include, among others, removal of accent marks, stop words and extra white spaces, and replacement of punctuation symbols, emails, emojis, etc.
104
42
245,516
def files_generator ( path , recursive ) : if recursive : for ( path , _ , files ) in os . walk ( path ) : for file in files : if not file . endswith ( BATCH_EXTENSION ) : yield ( path , file ) else : for file in os . listdir ( path ) : if ( os . path . isfile ( os . path . join ( path , file ) ) and not file . endswith ( BATCH_EXTENSION ) ) : yield ( path , file )
Yield files found in a given path .
113
9
245,517
def process_file ( self , path ) : if self . _config . verbose : self . _logger . info ( 'Processing file "%s"' , path ) output_path = '%s%s' % ( path , BATCH_EXTENSION ) with open ( output_path , 'w' ) as file : for line in lines_generator ( path ) : file . write ( '%s\n' % self . _cucco . normalize ( line . encode ( ) . decode ( 'utf-8' ) ) ) self . _logger . debug ( 'Created file "%s"' , output_path )
Process a file applying normalizations .
139
7
245,518
def process_files ( self , path , recursive = False ) : self . _logger . info ( 'Processing files in "%s"' , path ) for ( path , file ) in files_generator ( path , recursive ) : if not file . endswith ( BATCH_EXTENSION ) : self . process_file ( os . path . join ( path , file ) )
Apply normalizations over all files in the given directory .
83
11
245,519
def stop_watching ( self ) : self . _watch = False if self . _observer : self . _logger . info ( 'Stopping watcher' ) self . _observer . stop ( ) self . _logger . info ( 'Watcher stopped' )
Stop watching for files .
59
5
245,520
def watch ( self , path , recursive = False ) : self . _logger . info ( 'Initializing watcher for path "%s"' , path ) handler = FileHandler ( self ) self . _observer = Observer ( ) self . _observer . schedule ( handler , path , recursive ) self . _logger . info ( 'Starting watcher' ) self . _observer . start ( ) self . _watch = True try : self . _logger . info ( 'Waiting for file events' ) while self . _watch : time . sleep ( 1 ) except KeyboardInterrupt : # pragma: no cover self . stop_watching ( ) self . _observer . join ( )
Watch for files in a directory and apply normalizations .
149
11
245,521
def _process_event ( self , event ) : if ( not event . is_directory and not event . src_path . endswith ( BATCH_EXTENSION ) ) : self . _logger . info ( 'Detected file change: %s' , event . src_path ) self . _batch . process_file ( event . src_path )
Process received events .
79
4
245,522
def on_created ( self , event ) : self . _logger . debug ( 'Detected create event on watched path: %s' , event . src_path ) self . _process_event ( event )
Function called every time a new file is created .
46
10
245,523
def on_modified ( self , event ) : self . _logger . debug ( 'Detected modify event on watched path: %s' , event . src_path ) self . _process_event ( event )
Function called every time a file is modified .
46
10
245,524
def _parse_normalizations ( normalizations ) : str_type = str if sys . version_info [ 0 ] > 2 else ( str , unicode ) for normalization in normalizations : yield ( normalization , { } ) if isinstance ( normalization , str_type ) else normalization
Parse and yield normalizations .
63
7
245,525
def _parse_stop_words_file ( self , path ) : language = None loaded = False if os . path . isfile ( path ) : self . _logger . debug ( 'Loading stop words in %s' , path ) language = path . split ( '-' ) [ - 1 ] if not language in self . __stop_words : self . __stop_words [ language ] = set ( ) with codecs . open ( path , 'r' , 'UTF-8' ) as file : loaded = True for word in file : self . __stop_words [ language ] . add ( word . strip ( ) ) return loaded
Load stop words from the given path .
137
8
245,526
def normalize ( self , text , normalizations = None ) : for normalization , kwargs in self . _parse_normalizations ( normalizations or self . _config . normalizations ) : try : text = getattr ( self , normalization ) ( text , * * kwargs ) except AttributeError as e : self . _logger . debug ( 'Invalid normalization: %s' , e ) return text
Normalize a given text applying all normalizations .
91
10
245,527
def remove_accent_marks ( text , excluded = None ) : if excluded is None : excluded = set ( ) return unicodedata . normalize ( 'NFKC' , '' . join ( c for c in unicodedata . normalize ( 'NFKD' , text ) if unicodedata . category ( c ) != 'Mn' or c in excluded ) )
Remove accent marks from input text .
81
7
245,528
def replace_characters ( self , text , characters , replacement = '' ) : if not characters : return text characters = '' . join ( sorted ( characters ) ) if characters in self . _characters_regexes : characters_regex = self . _characters_regexes [ characters ] else : characters_regex = re . compile ( "[%s]" % re . escape ( characters ) ) self . _characters_regexes [ characters ] = characters_regex return characters_regex . sub ( replacement , text )
Remove characters from text .
116
5
245,529
def replace_punctuation ( self , text , excluded = None , replacement = '' ) : if excluded is None : excluded = set ( ) elif not isinstance ( excluded , set ) : excluded = set ( excluded ) punct = '' . join ( self . __punctuation . difference ( excluded ) ) return self . replace_characters ( text , characters = punct , replacement = replacement )
Replace punctuation symbols in text .
83
8
245,530
def replace_symbols ( text , form = 'NFKD' , excluded = None , replacement = '' ) : if excluded is None : excluded = set ( ) categories = set ( [ 'Mn' , 'Sc' , 'Sk' , 'Sm' , 'So' ] ) return '' . join ( c if unicodedata . category ( c ) not in categories or c in excluded else replacement for c in unicodedata . normalize ( form , text ) )
Replace symbols in text .
102
6
245,531
def get_idb_graph ( ) : digraph = nx . DiGraph ( ) for function in functions ( ) : for xref in itertools . chain ( function . xrefs_from , function . xrefs_to ) : frm = _try_get_function_start ( xref . frm ) to = _try_get_function_start ( xref . to ) digraph . add_edge ( frm , to ) return digraph
Export IDB to a NetworkX graph .
103
9
245,532
def name ( self ) : return self . TYPES . get ( self . _type , self . TYPES [ idaapi . o_idpspec0 ] )
Name of the xref type .
38
7
245,533
def reg ( self ) : if self . type . is_displ or self . type . is_phrase : size = core . get_native_size ( ) return base . get_register_name ( self . reg_id , size ) if self . type . is_reg : return base . get_register_name ( self . reg_id , self . size ) else : raise exceptions . SarkOperandWithoutReg ( "Operand does not have a register." )
Name of the register used in the operand .
101
10
245,534
def has_reg ( self , reg_name ) : return any ( operand . has_reg ( reg_name ) for operand in self . operands )
Check if a register is used in the instruction .
35
10
245,535
def regs ( self ) : regs = set ( ) for operand in self . operands : if not operand . type . has_reg : continue regs . update ( operand . regs ) return regs
Names of all registers used by the instruction .
48
9
245,536
def _pad ( self , text ) : top_bottom = ( "\n" * self . _padding ) + " " right_left = " " * self . _padding * self . PAD_WIDTH return top_bottom + right_left + text + right_left + top_bottom
Pad the text .
64
4
245,537
def _make_unique_title ( self , title ) : unique_title = title for counter in itertools . count ( ) : unique_title = "{}-{}" . format ( title , counter ) if not idaapi . find_tform ( unique_title ) : break return unique_title
Make the title unique .
66
5
245,538
def _get_handler ( self , node_id ) : handler = self . _get_attrs ( node_id ) . get ( self . HANDLER , self . _default_handler ) # Here we make sure the handler is an instance of `BasicNodeHandler` or inherited # types. While generally being bad Python practice, we still need it here as an # invalid handler can cause IDA to crash. if not isinstance ( handler , BasicNodeHandler ) : idaapi . msg ( ( "Invalid handler for node {}: {}. All handlers must inherit from" "`BasicNodeHandler`." ) . format ( node_id , handler ) ) handler = self . _default_handler return handler
Get the handler of a given node .
150
8
245,539
def _OnNodeInfo ( self , node_id ) : handler , value , attrs = self . _get_handling_triplet ( node_id ) frame_color = handler . on_frame_color ( value , attrs ) node_info = idaapi . node_info_t ( ) if frame_color is not None : node_info . frame_color = frame_color flags = node_info . get_flags_for_valid ( ) self . SetNodeInfo ( node_id , node_info , flags )
Sets the node info based on its attributes .
117
10
245,540
def get_string ( ea ) : # We get the item-head because the `GetStringType` function only works on the head of an item. string_type = idc . GetStringType ( idaapi . get_item_head ( ea ) ) if string_type is None : raise exceptions . SarkNoString ( "No string at 0x{:08X}" . format ( ea ) ) string = idc . GetString ( ea , strtype = string_type ) if not string : raise exceptions . SarkNoString ( "No string at 0x{:08X}" . format ( ea ) ) return string
Read the string at the given ea .
139
9
245,541
def copy_current_file_offset ( ) : start , end = sark . get_selection ( ) try : file_offset = sark . core . get_fileregion_offset ( start ) clipboard . copy ( "0x{:08X}" . format ( file_offset ) ) except sark . exceptions . NoFileOffset : message ( "The current address cannot be mapped to a valid offset of the input file." )
Get the file - offset mapped to the current address .
92
11
245,542
def fix_addresses ( start = None , end = None ) : if start in ( None , idaapi . BADADDR ) : start = idaapi . cvar . inf . minEA if end in ( None , idaapi . BADADDR ) : end = idaapi . cvar . inf . maxEA return start , end
Set missing addresses to start and end of IDB .
75
11
245,543
def set_name ( address , name , anyway = False ) : success = idaapi . set_name ( address , name , idaapi . SN_NOWARN | idaapi . SN_NOCHECK ) if success : return if anyway : success = idaapi . do_name_anyway ( address , name ) if success : return raise exceptions . SarkSetNameFailed ( "Failed renaming 0x{:08X} to {!r}." . format ( address , name ) ) raise exceptions . SarkErrorNameAlreadyExists ( "Can't rename 0x{:08X}. Name {!r} already exists." . format ( address , name ) )
Set the name of an address .
147
7
245,544
def is_same_function ( ea1 , ea2 ) : func1 = idaapi . get_func ( ea1 ) func2 = idaapi . get_func ( ea2 ) # This is bloated code. `None in (func1, func2)` will not work because of a # bug in IDAPython in the way functions are compared. if any ( func is None for func in ( func1 , func2 ) ) : return False return func1 . startEA == func2 . startEA
Are both addresses in the same function?
114
8
245,545
def get_nx_graph ( ea ) : nx_graph = networkx . DiGraph ( ) func = idaapi . get_func ( ea ) flowchart = FlowChart ( func ) for block in flowchart : # Make sure all nodes are added (including edge-less nodes) nx_graph . add_node ( block . startEA ) for pred in block . preds ( ) : nx_graph . add_edge ( pred . startEA , block . startEA ) for succ in block . succs ( ) : nx_graph . add_edge ( block . startEA , succ . startEA ) return nx_graph
Convert an IDA flowchart to a NetworkX graph .
142
13
245,546
def codeblocks ( start = None , end = None , full = True ) : if full : for function in functions ( start , end ) : fc = FlowChart ( f = function . func_t ) for block in fc : yield block else : start , end = fix_addresses ( start , end ) for code_block in FlowChart ( bounds = ( start , end ) ) : yield code_block
Get all CodeBlock s in a given range .
88
10
245,547
def struct_member_error ( err , sid , name , offset , size ) : exception , msg = STRUCT_ERROR_MAP [ err ] struct_name = idc . GetStrucName ( sid ) return exception ( ( 'AddStructMember(struct="{}", member="{}", offset={}, size={}) ' 'failed: {}' ) . format ( struct_name , name , offset , size , msg ) )
Create and format a struct member exception .
94
8
245,548
def create_struct ( name ) : sid = idc . GetStrucIdByName ( name ) if sid != idaapi . BADADDR : # The struct already exists. raise exceptions . SarkStructAlreadyExists ( "A struct names {!r} already exists." . format ( name ) ) sid = idc . AddStrucEx ( - 1 , name , 0 ) if sid == idaapi . BADADDR : raise exceptions . SarkStructCreationFailed ( "Struct creation failed." ) return sid
Create a structure .
113
4
245,549
def get_struct ( name ) : sid = idc . GetStrucIdByName ( name ) if sid == idaapi . BADADDR : raise exceptions . SarkStructNotFound ( ) return sid
Get a struct by its name .
45
8
245,550
def get_common_register ( start , end ) : registers = defaultdict ( int ) for line in lines ( start , end ) : insn = line . insn for operand in insn . operands : if not operand . type . has_phrase : continue if not operand . base : continue register_name = operand . base registers [ register_name ] += 1 return max ( registers . iteritems ( ) , key = operator . itemgetter ( 1 ) ) [ 0 ]
Get the register most commonly used in accessing structs .
106
11
245,551
def _enum_member_error ( err , eid , name , value , bitmask ) : exception , msg = ENUM_ERROR_MAP [ err ] enum_name = idaapi . get_enum_name ( eid ) return exception ( ( 'add_enum_member(enum="{}", member="{}", value={}, bitmask=0x{:08X}) ' 'failed: {}' ) . format ( enum_name , name , value , bitmask , msg ) )
Format enum member error .
109
5
245,552
def _get_enum ( name ) : eid = idaapi . get_enum ( name ) if eid == idaapi . BADADDR : raise exceptions . EnumNotFound ( 'Enum "{}" does not exist.' . format ( name ) ) return eid
Get an existing enum ID
60
5
245,553
def add_enum ( name = None , index = None , flags = idaapi . hexflag ( ) , bitfield = False ) : if name is not None : with ignored ( exceptions . EnumNotFound ) : _get_enum ( name ) raise exceptions . EnumAlreadyExists ( ) if index is None or index < 0 : index = idaapi . get_enum_qty ( ) eid = idaapi . add_enum ( index , name , flags ) if eid == idaapi . BADADDR : raise exceptions . EnumCreationFailed ( 'Failed creating enum "{}"' . format ( name ) ) if bitfield : idaapi . set_enum_bf ( eid , bitfield ) return Enum ( eid = eid )
Create a new enum .
170
5
245,554
def _add_enum_member ( enum , name , value , bitmask = DEFMASK ) : error = idaapi . add_enum_member ( enum , name , value , bitmask ) if error : raise _enum_member_error ( error , enum , name , value , bitmask )
Add an enum member .
66
5
245,555
def _iter_bitmasks ( eid ) : bitmask = idaapi . get_first_bmask ( eid ) yield bitmask while bitmask != DEFMASK : bitmask = idaapi . get_next_bmask ( eid , bitmask ) yield bitmask
Iterate all bitmasks in a given enum .
64
11
245,556
def _iter_enum_member_values ( eid , bitmask ) : value = idaapi . get_first_enum_member ( eid , bitmask ) yield value while value != DEFMASK : value = idaapi . get_next_enum_member ( eid , value , bitmask ) yield value
Iterate member values with given bitmask inside the enum
71
11
245,557
def _iter_serial_enum_member ( eid , value , bitmask ) : cid , serial = idaapi . get_first_serial_enum_member ( eid , value , bitmask ) while cid != idaapi . BADNODE : yield cid , serial cid , serial = idaapi . get_next_serial_enum_member ( cid , serial )
Iterate serial and CID of enum members with given value and bitmask .
87
16
245,558
def _iter_enum_constant_ids ( eid ) : for bitmask in _iter_bitmasks ( eid ) : for value in _iter_enum_member_values ( eid , bitmask ) : for cid , serial in _iter_serial_enum_member ( eid , value , bitmask ) : yield cid
Iterate the constant IDs of all members in the given enum
76
12
245,559
def add ( self , name , value , bitmask = DEFMASK ) : _add_enum_member ( self . _eid , name , value , bitmask )
Add an enum member
38
4
245,560
def remove ( self , name ) : member = self [ name ] serial = member . serial value = member . value bmask = member . bmask success = idaapi . del_enum_member ( self . _eid , value , serial , bmask ) if not success : raise exceptions . CantDeleteEnumMember ( "Can't delete enum member {!r}." . format ( name ) )
Remove an enum member by name
86
6
245,561
def name ( self , name ) : success = idaapi . set_enum_name ( self . eid , name ) if not success : raise exceptions . CantRenameEnum ( "Cant rename enum {!r} to {!r}." . format ( self . name , name ) )
Set the enum name .
65
5
245,562
def name ( self , name ) : success = idaapi . set_enum_member_name ( self . cid , name ) if not success : raise exceptions . CantRenameEnumMember ( "Failed renaming {!r} to {!r}. Does the name exist somewhere else?" . format ( self . name , name ) )
Set the member name .
74
5
245,563
def functions ( start = None , end = None ) : start , end = fix_addresses ( start , end ) for func_t in idautils . Functions ( start , end ) : yield Function ( func_t )
Get all functions in range .
48
6
245,564
def xrefs_from ( self ) : for line in self . lines : for xref in line . xrefs_from : if xref . type . is_flow : continue if xref . to in self and xref . iscode : continue yield xref
Xrefs from the function .
58
7
245,565
def set_name ( self , name , anyway = False ) : set_name ( self . startEA , name , anyway = anyway )
Set Function Name .
29
4
245,566
def color ( self ) : color = idc . GetColor ( self . ea , idc . CIC_FUNC ) if color == 0xFFFFFFFF : return None return color
Function color in IDA View
40
6
245,567
def color ( self , color ) : if color is None : color = 0xFFFFFFFF idc . SetColor ( self . ea , idc . CIC_FUNC , color )
Function Color in IDA View .
41
7
245,568
def lines ( start = None , end = None , reverse = False , selection = False ) : if selection : start , end = get_selection ( ) else : start , end = fix_addresses ( start , end ) if not reverse : item = idaapi . get_item_head ( start ) while item < end : yield Line ( item ) item += idaapi . get_item_size ( item ) else : # if reverse: item = idaapi . get_item_head ( end - 1 ) while item >= start : yield Line ( item ) item = idaapi . get_item_head ( item - 1 )
Iterate lines in range .
137
6
245,569
def type ( self ) : properties = { self . is_code : "code" , self . is_data : "data" , self . is_string : "string" , self . is_tail : "tail" , self . is_unknown : "unknown" } for k , v in properties . items ( ) : if k : return v
return the type of the Line
75
6
245,570
def color ( self ) : color = idc . GetColor ( self . ea , idc . CIC_ITEM ) if color == 0xFFFFFFFF : return None return color
Line color in IDA View
40
6
245,571
def color ( self , color ) : if color is None : color = 0xFFFFFFFF idc . SetColor ( self . ea , idc . CIC_ITEM , color )
Line Color in IDA View .
41
7
245,572
def capture_widget ( widget , path = None ) : if use_qt5 : pixmap = widget . grab ( ) else : pixmap = QtGui . QPixmap . grabWidget ( widget ) if path : pixmap . save ( path ) else : image_buffer = QtCore . QBuffer ( ) image_buffer . open ( QtCore . QIODevice . ReadWrite ) pixmap . save ( image_buffer , "PNG" ) return image_buffer . data ( ) . data ( )
Grab an image of a Qt widget
116
7
245,573
def get_widget ( title ) : tform = idaapi . find_tform ( title ) if not tform : raise exceptions . FormNotFound ( "No form titled {!r} found." . format ( title ) ) return form_to_widget ( tform )
Get the Qt widget of the IDA window with the given title .
60
14
245,574
def get_window ( ) : tform = idaapi . get_current_tform ( ) # Required sometimes when closing IDBs and not IDA. if not tform : tform = idaapi . find_tform ( "Output window" ) widget = form_to_widget ( tform ) window = widget . window ( ) return window
Get IDA's top-level window .
76
8
245,575
def add_menu ( self , name ) : if name in self . _menus : raise exceptions . MenuAlreadyExists ( "Menu name {!r} already exists." . format ( name ) ) menu = self . _menu . addMenu ( name ) self . _menus [ name ] = menu
Add a top - level menu .
65
7
245,576
def remove_menu ( self , name ) : if name not in self . _menus : raise exceptions . MenuNotFound ( "Menu {!r} was not found. It might be deleted, or belong to another menu manager." . format ( name ) ) self . _menu . removeAction ( self . _menus [ name ] . menuAction ( ) ) del self . _menus [ name ]
Remove a top - level menu .
86
7
245,577
def clear ( self ) : for menu in self . _menus . itervalues ( ) : self . _menu . removeAction ( menu . menuAction ( ) ) self . _menus = { }
Clear all menus created by this manager .
45
8
245,578
def get_by_flags ( self , flags ) : for reg in self . _reg_infos : if reg . flags & flags == flags : yield reg
Iterate all register infos matching the given flags .
34
11
245,579
def get_single_by_flags ( self , flags ) : regs = list ( self . get_by_flags ( flags ) ) if len ( regs ) != 1 : raise ValueError ( "Flags do not return unique resigter. {!r}" , regs ) return regs [ 0 ]
Get the register info matching the flag . Raises ValueError if more than one are found .
67
19
245,580
def segments ( seg_type = None ) : for index in xrange ( idaapi . get_segm_qty ( ) ) : seg = Segment ( index = index ) if ( seg_type is None ) or ( seg . type == seg_type ) : yield Segment ( index = index )
Iterate segments based on type
72
6
245,581
def next ( self ) : seg = Segment ( segment_t = idaapi . get_next_seg ( self . ea ) ) if seg . ea <= self . ea : raise exceptions . NoMoreSegments ( "This is the last segment. No segments exist after it." ) return seg
Get the next segment .
70
5
245,582
def prev ( self ) : seg = Segment ( segment_t = idaapi . get_prev_seg ( self . ea ) ) if seg . ea >= self . ea : raise exceptions . NoMoreSegments ( "This is the first segment. no segments exist before it." ) return seg
Get the previous segment .
70
5
245,583
def get_ecosystem_solver ( ecosystem_name , parser_kwargs = None , fetcher_kwargs = None ) : from . python import PythonSolver if ecosystem_name . lower ( ) == "pypi" : source = Source ( url = "https://pypi.org/simple" , warehouse_api_url = "https://pypi.org/pypi" , warehouse = True ) return PythonSolver ( parser_kwargs , fetcher_kwargs = { "source" : source } ) raise NotImplementedError ( "Unknown ecosystem: {}" . format ( ecosystem_name ) )
Get Solver subclass instance for particular ecosystem .
139
9
245,584
def check ( self , version ) : # Ignore PyDocStyleBear def _compare_spec ( spec ) : if len ( spec ) == 1 : spec = ( "=" , spec [ 0 ] ) token = Tokens . operators . index ( spec [ 0 ] ) comparison = compare_version ( version , spec [ 1 ] ) if token in [ Tokens . EQ1 , Tokens . EQ2 ] : return comparison == 0 elif token == Tokens . GT : return comparison == 1 elif token == Tokens . LT : return comparison == - 1 elif token == Tokens . GTE : return comparison >= 0 elif token == Tokens . LTE : return comparison <= 0 elif token == Tokens . NEQ : return comparison != 0 else : raise ValueError ( "Invalid comparison token" ) results , intermediaries = False , False for spec in self . spec : if isinstance ( spec , list ) : intermediary = True for sub in spec : intermediary &= _compare_spec ( sub ) intermediaries |= intermediary elif isinstance ( spec , tuple ) : results |= _compare_spec ( spec ) return results or intermediaries
Check if version fits into our dependency specification .
237
9
245,585
def solve(self, dependencies, graceful=True, all_versions=False):
    """Solve dependencies against upstream repository.

    :param dependencies: dependency specifications to resolve
    :param graceful: if True, log packages without releases instead of raising
    :param all_versions: if True, report every matching version, not just one
    :return: mapping of package name to resolved version(s) (None if nothing matched)
    :raises SolverException: on duplicate dependencies or (non-graceful) missing releases
    """

    def _version_cmp(a, b):
        # Entries are (version, index_url) pairs; order by version only.
        return compare_version(a[0], b[0])

    resolved = {}
    for dependency in self.dependency_parser.parse(dependencies):
        _LOGGER.debug("Fetching releases for: {}".format(dependency))
        name, releases = self.release_fetcher.fetch_releases(dependency.name)

        if name in resolved:
            raise SolverException("Dependency: {} is listed multiple times".format(name))

        if not releases:
            if not graceful:
                raise SolverException("No releases found for package {}".format(dependency.name))
            _LOGGER.info("No releases found for package %s", dependency.name)

        candidates = sorted(
            (release for release in releases if release in dependency),
            key=cmp_to_key(_version_cmp),
        )
        _LOGGER.debug(" matching: %s", candidates)

        if all_versions:
            resolved[name] = candidates
        elif not candidates:
            resolved[name] = None
        elif self._highest_dependency_version:
            resolved[name] = candidates[-1]
        else:
            resolved[name] = candidates[0]
    return resolved
Solve dependencies against upstream repository .
323
7
245,586
def pip_compile(*packages: str):
    """Run pip-compile to pin down packages, also resolve their transitive dependencies.

    :param packages: requirement lines to resolve
    :return: the fully pinned requirements output as text
    :raises ThothPipCompileError: if pip-compile raises or exits non-zero
    """
    requirements_txt = "\n".join(packages)

    # pip-compile reads from a file, so stage the requirements in a
    # scratch directory and run from there.
    with tempfile.TemporaryDirectory() as tmp_dirname, cwd(tmp_dirname):
        with open("requirements.in", "w") as requirements_file:
            requirements_file.write(requirements_txt)

        runner = CliRunner()
        try:
            result = runner.invoke(cli, ["requirements.in"], catch_exceptions=False)
        except Exception as exc:
            raise ThothPipCompileError(str(exc)) from exc

        if result.exit_code != 0:
            error_msg = (
                f"pip-compile returned non-zero ({result.exit_code:d}) "
                f"output: {result.output_bytes.decode():s}"
            )
            raise ThothPipCompileError(error_msg)

        return result.output_bytes.decode()
Run pip - compile to pin down packages also resolve their transitive dependencies .
205
15
245,587
def _print_version(ctx, _, value):
    """Print solver version and exit."""
    # Click fires eager option callbacks even during completion/parsing
    # recovery; only act when actually requested.
    if ctx.resilient_parsing or not value:
        return
    click.echo(analyzer_version)
    ctx.exit()
Print solver version and exit .
43
7
245,588
def cli(ctx=None, verbose=0):
    """Thoth solver command line interface."""
    if ctx:
        ctx.auto_envvar_prefix = "THOTH_SOLVER"
    if not verbose:
        return
    # Any truthy verbosity switches on debug-level logging.
    _LOG.setLevel(logging.DEBUG)
    _LOG.debug("Debug mode is on")
Thoth solver command line interface .
63
8
245,589
def pypi(
    click_ctx,
    requirements,
    index=None,
    python_version=3,
    exclude_packages=None,
    output=None,
    subgraph_check_api=None,
    no_transitive=True,
    no_pretty=False,
):
    """Manipulate with dependency requirements using PyPI."""
    # Requirements arrive as one string separated by the literal two-character
    # sequence backslash-n (e.g. when passed via an environment variable).
    requirements = [line.strip() for line in requirements.split("\\n") if line]
    if not requirements:
        _LOG.error("No requirements specified, exiting")
        sys.exit(1)

    if not subgraph_check_api:
        _LOG.info(
            "No subgraph check API provided, no queries will be done for dependency subgraphs that should be avoided"
        )

    index_urls = index.split(",") if index else ("https://pypi.org/simple",)
    result = resolve_python(
        requirements,
        index_urls=index_urls,
        python_version=int(python_version),
        transitive=not no_transitive,
        exclude_packages=set(map(str.strip, (exclude_packages or "").split(","))),
        subgraph_check_api=subgraph_check_api,
    )
    print_command_result(
        click_ctx,
        result,
        analyzer=analyzer_name,
        analyzer_version=analyzer_version,
        output=output or "-",
        pretty=not no_pretty,
    )
Manipulate with dependency requirements using PyPI .
295
10
245,590
def _create_entry ( entry : dict , source : Source = None ) -> dict : entry [ "package_name" ] = entry [ "package" ] . pop ( "package_name" ) entry [ "package_version" ] = entry [ "package" ] . pop ( "installed_version" ) if source : entry [ "index_url" ] = source . url entry [ "sha256" ] = [ ] for item in source . get_package_hashes ( entry [ "package_name" ] , entry [ "package_version" ] ) : entry [ "sha256" ] . append ( item [ "sha256" ] ) entry . pop ( "package" ) for dependency in entry [ "dependencies" ] : dependency . pop ( "key" , None ) dependency . pop ( "installed_version" , None ) return entry
Filter and normalize the output of pipdeptree entry .
184
13
245,591
def _get_environment_details(python_bin: str) -> list:
    """Get information about packages in environment where packages get installed."""
    pipdeptree_output = run_command(
        "{} -m pipdeptree --json".format(python_bin), is_json=True
    ).stdout
    # Normalize every entry into the flattened shape used downstream.
    return [_create_entry(item) for item in pipdeptree_output]
Get information about packages in environment where packages get installed .
68
11
245,592
def _should_resolve_subgraph(subgraph_check_api: str, package_name: str, package_version: str, index_url: str) -> bool:
    """Ask the given subgraph check API if the given package in the given version should be included in the resolution.

    :return: True if the dependency subgraph should be resolved, False if it should be skipped
    :raises requests.HTTPError: if the API responds with an error status code
    :raises ValueError: if the API responds with an unexpected success status code
    """
    _LOGGER.info(
        "Checking if the given dependency subgraph for package %r in version %r from index %r should be resolved",
        package_name,
        package_version,
        index_url,
    )
    response = requests.get(
        subgraph_check_api,
        params={"package_name": package_name, "package_version": package_version, "index_url": index_url},
    )
    if response.status_code == 200:
        return True
    elif response.status_code == 208:
        # This is probably not the correct HTTP status code to be used here, but which one should be used?
        return False

    # Raises for 4xx/5xx; any other success-ish status falls through below.
    response.raise_for_status()
    # BUGFIX: the original passed logging-style %-args to ValueError, so the
    # message was never formatted (and the placeholders did not even match
    # the arguments). Format it explicitly instead.
    raise ValueError(
        "Unreachable code - subgraph check API responded with unknown HTTP status "
        "code {} for package {!r} in version {!r} from index {!r}".format(
            response.status_code, package_name, package_version, index_url
        )
    )
Ask the given subgraph check API if the given package in the given version should be included in the resolution .
238
22
245,593
# Context-manager-style generator: installs `package` (optionally pinned to
# `version`, optionally from `index_url`) with the given interpreter, yields
# control to the caller, and in the finally block (when `clean` is true)
# uninstalls the package and reinstalls the previously installed version
# captured via _pipdeptree before the install.
# NOTE(review): this is a bare generator function — presumably wrapped with
# contextlib.contextmanager at its usage/decoration site; confirm.
# Cleanup failures are logged as warnings only, so a broken restore does not
# mask the caller's own error. Arguments are shell-quoted via quote(); the
# index host is passed as --trusted-host since indexes are managed by Thoth.
def _install_requirement ( python_bin : str , package : str , version : str = None , index_url : str = None , clean : bool = True ) -> None : previous_version = _pipdeptree ( python_bin , package ) try : cmd = "{} -m pip install --force-reinstall --no-cache-dir --no-deps {}" . format ( python_bin , quote ( package ) ) if version : cmd += "=={}" . format ( quote ( version ) ) if index_url : cmd += ' --index-url "{}" ' . format ( quote ( index_url ) ) # Supply trusted host by default so we do not get errors - it safe to # do it here as package indexes are managed by Thoth. trusted_host = urlparse ( index_url ) . netloc cmd += " --trusted-host {}" . format ( trusted_host ) _LOGGER . debug ( "Installing requirement %r in version %r" , package , version ) run_command ( cmd ) yield finally : if clean : _LOGGER . debug ( "Removing installed package %r" , package ) cmd = "{} -m pip uninstall --yes {}" . format ( python_bin , quote ( package ) ) result = run_command ( cmd , raise_on_error = False ) if result . return_code != 0 : _LOGGER . warning ( "Failed to restore previous environment by removing package %r (installed version %r), " "the error is not fatal but can affect future actions: %s" , package , version , result . stderr , ) _LOGGER . debug ( "Restoring previous environment setup after installation of %r (%s)" , package , previous_version ) if previous_version : cmd = "{} -m pip install --force-reinstall --no-cache-dir --no-deps {}=={}" . format ( python_bin , quote ( package ) , quote ( previous_version [ "package" ] [ "installed_version" ] ) ) result = run_command ( cmd , raise_on_error = False ) if result . return_code != 0 : _LOGGER . warning ( "Failed to restore previous environment for package %r (installed version %r), " ", the error is not fatal but can affect future actions (previous version: %r): %s" , package , version , previous_version , result . stderr , )
Install requirements specified using suggested pip binary .
529
8
245,594
def _pipdeptree(python_bin, package_name: str = None, warn: bool = False) -> typing.Optional[dict]:
    """Get pip dependency tree by executing pipdeptree tool.

    :param python_bin: interpreter whose environment should be inspected
    :param package_name: if given, return only the entry for this package
    :param warn: log a warning when the requested package is not found
    :return: the whole tree, a single package entry, or None when not found
    """
    cmd = "{} -m pipdeptree --json".format(python_bin)
    _LOGGER.debug("Obtaining pip dependency tree using: %r", cmd)
    tree = run_command(cmd, is_json=True).stdout

    if not package_name:
        return tree

    wanted = package_name.lower()
    # In some versions pipdeptree does not work with --packages flag, do the logic on our own.
    # TODO: we should probably do difference of reference this output and original environment
    for entry in tree:
        if entry["package"]["key"].lower() == wanted:
            return entry

    # The given package was not found.
    if warn:
        _LOGGER.warning("Package %r was not found in pipdeptree output %r", package_name, tree)
    return None
Get pip dependency tree by executing pipdeptree tool .
212
12
245,595
def _get_dependency_specification ( dep_spec : typing . List [ tuple ] ) -> str : return "," . join ( dep_range [ 0 ] + dep_range [ 1 ] for dep_range in dep_spec )
Get string representation of dependency specification as provided by PythonDependencyParser .
52
15
245,596
def resolve(
    requirements: typing.List[str],
    index_urls: list = None,
    python_version: int = 3,
    exclude_packages: set = None,
    transitive: bool = True,
    subgraph_check_api: str = None,
) -> dict:
    """Resolve given requirements for the given Python version.

    Creates a scratch virtualenv, installs pipdeptree into it, then resolves
    every requirement against each of the provided package indexes and merges
    the per-index results.
    """
    assert python_version in (2, 3), "Unknown Python version"

    if subgraph_check_api and not transitive:
        _LOGGER.error("The check against subgraph API cannot be done if no transitive dependencies are resolved")
        sys.exit(2)

    python_bin = "python3" if python_version == 3 else "python2"
    # NOTE(review): the virtualenv is always created with -p python3 even when
    # python_version == 2 — confirm this is intentional.
    run_command("virtualenv -p python3 venv")
    python_bin = "venv/bin/" + python_bin
    run_command("{} -m pip install pipdeptree".format(python_bin))

    environment_details = _get_environment_details(python_bin)

    result = {
        "tree": [],
        "errors": [],
        "unparsed": [],
        "unresolved": [],
        "environment": environment_details,
    }

    all_solvers = [
        PythonSolver(fetcher_kwargs={"source": Source(index_url)})
        for index_url in index_urls
    ]

    for solver in all_solvers:
        solver_result = _do_resolve_index(
            python_bin=python_bin,
            solver=solver,
            all_solvers=all_solvers,
            requirements=requirements,
            exclude_packages=exclude_packages,
            transitive=transitive,
            subgraph_check_api=subgraph_check_api,
        )
        for key in ("tree", "errors", "unparsed", "unresolved"):
            result[key].extend(solver_result[key])

    return result
Resolve given requirements for the given Python version .
466
10
245,597
def fetch_releases(self, package_name):
    """Fetch package and index_url for a package_name."""
    package_name = self.source.normalize_package_name(package_name)
    # Pair every available version with the index it came from.
    versions = self.source.get_package_versions(package_name)
    return package_name, [(version, self.index_url) for version in versions]
Fetch package and index_url for a package_name .
83
13
245,598
# Parse PyPI specification of a single dependency (one requirements.txt-style
# line) into a Dependency object.
#
# _extract_op_version normalizes PEP 440 operators into plain range pairs:
#   * "~=" (compatible release) expands to a [(">=", X), ("<", Y)] pair by
#     bumping the next-to-last version component;
#   * "== X.*" (version matching with trailing .*) is resolved by shelling out
#     to the external /usr/bin/semver-ranger binary — TODO(review): confirm
#     that binary is present in the deployment image;
#   * "===" (arbitrary equality, discouraged) is downgraded to plain "==".
# _get_pip_spec bridges pip API differences: old pip exposes `specs`, pip 8+
# exposes `specifier`; an empty specifier is widened to [(">=", "0.0.0")].
# The spec is written to a temporary file because pip's parse_requirements
# only accepts a file path; .pop() on the parsed list assumes exactly one
# requirement per call.
def parse_python ( spec ) : # Ignore PyDocStyleBear def _extract_op_version ( spec ) : # https://www.python.org/dev/peps/pep-0440/#compatible-release if spec . operator == "~=" : version = spec . version . split ( "." ) if len ( version ) in { 2 , 3 , 4 } : if len ( version ) in { 3 , 4 } : del version [ - 1 ] # will increase the last but one in next line version [ - 1 ] = str ( int ( version [ - 1 ] ) + 1 ) else : raise ValueError ( "%r must not be used with %r" % ( spec . operator , spec . version ) ) return [ ( ">=" , spec . version ) , ( "<" , "." . join ( version ) ) ] # Trailing .* is permitted per # https://www.python.org/dev/peps/pep-0440/#version-matching elif spec . operator == "==" and spec . version . endswith ( ".*" ) : try : result = check_output ( [ "/usr/bin/semver-ranger" , spec . version ] , universal_newlines = True ) . strip ( ) gte , lt = result . split ( ) return [ ( ">=" , gte . lstrip ( ">=" ) ) , ( "<" , lt . lstrip ( "<" ) ) ] except ValueError : _LOGGER . warning ( "couldn't resolve ==%s" , spec . version ) return spec . operator , spec . version # https://www.python.org/dev/peps/pep-0440/#arbitrary-equality # Use of this operator is heavily discouraged, so just convert it to 'Version matching' elif spec . operator == "===" : return "==" , spec . version else : return spec . operator , spec . version def _get_pip_spec ( requirements ) : """There is no `specs` field In Pip 8+, take info from `specifier` field.""" if hasattr ( requirements , "specs" ) : return requirements . specs elif hasattr ( requirements , "specifier" ) : specs = [ _extract_op_version ( spec ) for spec in requirements . specifier ] if len ( specs ) == 0 : # TODO: I'm not sure with this one # we should probably return None instead and let pip deal with this specs = [ ( ">=" , "0.0.0" ) ] return specs _LOGGER . 
info ( "Parsing dependency %r" , spec ) # create a temporary file and store the spec there since # `parse_requirements` requires a file with NamedTemporaryFile ( mode = "w+" , suffix = "pysolve" ) as f : f . write ( spec ) f . flush ( ) parsed = parse_requirements ( f . name , session = f . name ) dependency = [ Dependency ( x . name , _get_pip_spec ( x . req ) ) for x in parsed ] . pop ( ) return dependency
Parse PyPI specification of a single dependency .
682
10
245,599
def get(obj):
    """Determines file format and picks suitable file types extensions and MIME types"""
    if not isinstance(obj, bytes):
        raise TypeError("object type must be bytes")

    # Render the payload as space-separated hex pairs ("AA BB CC ...") so
    # signatures stored in the same form can be matched by string slicing.
    stream = " ".join('{:02X}'.format(byte) for byte in obj)

    candidates = {"type": dict(), "extension": dict(), "mime": dict()}
    for element in data:
        # Each byte occupies three characters ("XX ") in the rendered stream.
        start = element["offset"] * 2 + element["offset"]
        for signature in element["signature"]:
            if stream[start:len(signature) + start] == signature:
                # Record the matched signature length per candidate; a later
                # matching signature of the same element overwrites earlier.
                for key in ("type", "extension", "mime"):
                    candidates[key][element[key]] = len(signature)

    # Longest signature match first — more specific formats win.
    ranked = {
        key: sorted(mapping, key=mapping.get, reverse=True)
        for key, mapping in candidates.items()
    }
    return Info(ranked["type"], ranked["extension"], ranked["mime"])
Determines the file format and picks suitable file type extensions and MIME types
241
15