idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
46,800
def create_superuser(self, email, password, **extra_fields):
    """Save a new User with is_staff and is_superuser set to True.

    Both flags default to True; explicitly passing either as anything
    other than True raises ValueError.
    """
    extra_fields.setdefault('is_staff', True)
    extra_fields.setdefault('is_superuser', True)
    # Reject callers that explicitly disabled a required flag.
    if extra_fields.get('is_staff') is not True:
        raise ValueError('Superuser must have is_staff=True.')
    if extra_fields.get('is_superuser') is not True:
        raise ValueError('Superuser must have is_superuser=True.')
    return self._create_user(email, password, **extra_fields)
Save new User with is_staff and is_superuser set to True
46,801
def load_file_contents(file_path, as_list=True):
    """Load a file (relative to HERE) as a list of lines or one string.

    :param file_path: path relative to the module-level HERE directory
    :param as_list: when True return the lines, otherwise the raw text
    """
    abs_file_path = join(HERE, file_path)
    with open(abs_file_path, encoding='utf-8') as file_pointer:
        content = file_pointer.read()
    return content.splitlines() if as_list else content
Load file as string or list
46,802
def clean_password2(self):
    """Check whether password1 and password2 match.

    On mismatch the error is attached to the password1 field;
    password2 is always returned as the cleaned value.
    """
    password1 = self.cleaned_data.get('password1')
    password2 = self.cleaned_data.get('password2')
    mismatch = password1 and password2 and password1 != password2
    if mismatch:
        self.add_error(
            'password1',
            forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            ),
        )
    return password2
Check whether password1 and password2 are equivalent
46,803
def _post_clean(self):
    """Run password validation after the normal clean methods.

    Chains up first, then validates password1 against Django's
    configured validators; validator failures are attached to the
    password1 field rather than raised.
    """
    super()._post_clean()
    password = self.cleaned_data.get('password1')
    if password:
        try:
            # self.instance lets validators check similarity to user attributes.
            password_validation.validate_password(password, self.instance)
        except ValidationError as error:
            self.add_error('password1', error)
Run password validation after clean methods
46,804
def clean ( self ) : super ( ) . clean ( ) self . email = self . __class__ . objects . normalize_email ( self . email )
Override default clean method to normalize email .
46,805
def normalize_feature_inputs(ctx, param, value):
    """Click callback that normalizes feature input values.

    Each value is first tried as a file (or '-' for stdin) of GeoJSON
    features; if opening fails with IOError it is instead parsed as a
    coordinate query and wrapped into a Point Feature.
    """
    # Default to stdin when no values were given.
    for feature_like in value or ('-',):
        try:
            with click.open_file(feature_like) as src:
                for feature in iter_features(iter(src)):
                    yield feature
        except IOError:
            # Not a readable file: treat the string as coordinates.
            coords = list(coords_from_query(feature_like))
            yield {'type': 'Feature', 'properties': {}, 'geometry': {'type': 'Point', 'coordinates': coords}}
Click callback that normalizes feature input values .
46,806
def iter_features(geojsonfile, func=None):
    """Extract GeoJSON features from a text file object.

    Supports three layouts: RFC 7464-style records separated by the
    U+001E record separator, one JSON object per line, and a single
    pretty-printed object spanning the whole file. Each object is
    passed through *func* (identity by default); falsy results are
    skipped. Bare geometries ('coordinates' in obj) are wrapped into
    Features via to_feature.
    """
    func = func or (lambda x: x)
    first_line = next(geojsonfile)
    if first_line.startswith(u'\x1e'):
        # Record-separator framing: accumulate lines until the next \x1e.
        text_buffer = first_line.strip(u'\x1e')
        for line in geojsonfile:
            if line.startswith(u'\x1e'):
                if text_buffer:
                    obj = json.loads(text_buffer)
                    if 'coordinates' in obj:
                        obj = to_feature(obj)
                    newfeat = func(obj)
                    if newfeat:
                        yield newfeat
                text_buffer = line.strip(u'\x1e')
            else:
                text_buffer += line
        else:
            # for/else: flush the final buffered record.
            obj = json.loads(text_buffer)
            if 'coordinates' in obj:
                obj = to_feature(obj)
            newfeat = func(obj)
            if newfeat:
                yield newfeat
    else:
        try:
            # Try line-delimited JSON first.
            obj = json.loads(first_line)
            if obj['type'] == 'Feature':
                newfeat = func(obj)
                if newfeat:
                    yield newfeat
                for line in geojsonfile:
                    newfeat = func(json.loads(line))
                    if newfeat:
                        yield newfeat
            elif obj['type'] == 'FeatureCollection':
                for feat in obj['features']:
                    newfeat = func(feat)
                    if newfeat:
                        yield newfeat
            elif 'coordinates' in obj:
                newfeat = func(to_feature(obj))
                if newfeat:
                    yield newfeat
                for line in geojsonfile:
                    newfeat = func(to_feature(json.loads(line)))
                    if newfeat:
                        yield newfeat
        except ValueError:
            # First line alone was not valid JSON: parse the whole file.
            text = "".join(chain([first_line], geojsonfile))
            obj = json.loads(text)
            if obj['type'] == 'Feature':
                newfeat = func(obj)
                if newfeat:
                    yield newfeat
            elif obj['type'] == 'FeatureCollection':
                for feat in obj['features']:
                    newfeat = func(feat)
                    if newfeat:
                        yield newfeat
            elif 'coordinates' in obj:
                newfeat = func(to_feature(obj))
                if newfeat:
                    yield newfeat
Extract GeoJSON features from a text file object .
46,807
def iter_query(query):
    """Accept a filename, stream, or string.

    Returns an iterator over the lines of the query: the file's lines
    when *query* names a readable file, otherwise the string itself as
    a single-element list.
    """
    try:
        return click.open_file(query).readlines()
    except IOError:
        return [query]
Accept a filename, stream, or string. Returns an iterator over the lines of the query.
46,808
def normalize_feature_objects(feature_objs):
    """Normalize an iterable of Feature mappings or geo-interface objects.

    Takes GeoJSON-like Feature mappings or objects exposing
    __geo_interface__ and yields plain Feature mappings.

    :raises ValueError: for any object that is neither.
    """
    for obj in feature_objs:
        if hasattr(obj, "__geo_interface__") and 'type' in obj.__geo_interface__.keys() and obj.__geo_interface__['type'] == 'Feature':
            yield obj.__geo_interface__
        elif isinstance(obj, dict) and 'type' in obj and obj['type'] == 'Feature':
            yield obj
        else:
            # Bug fix: the two adjacent literals previously concatenated to
            # "object {0}as GeoJSON Feature" (missing space).
            raise ValueError(
                "Did not recognize object {0} as GeoJSON Feature".format(obj))
Takes an iterable of GeoJSON - like Feature mappings or an iterable of objects with a geo interface and normalizes it to the former .
46,809
async def api(self, endpoint, params=None, test=False):
    """Communicate with the Traccar API.

    Performs a GET on <base>/<endpoint> with an 8 second timeout and
    updates self._authenticated / self._connected from the outcome.
    Returns the decoded JSON payload (empty dict on failure or when
    *test* is True).
    """
    data = {}
    url = "{}/{}".format(self._api, endpoint)
    try:
        async with async_timeout.timeout(8, loop=self._loop):
            response = await self._session.get(url, auth=self._auth, headers=HEADERS, params=params)
            if response.status == 200:
                self._authenticated = True
                self._connected = True
                if not test:
                    data = await response.json()
            elif response.status == 401:
                # Reachable but credentials rejected.
                self._authenticated = False
                self._connected = True
    except asyncio.TimeoutError as error:
        self._authenticated, self._connected = False, False
        if not test:
            _LOGGER.warning("Timeouterror connecting to Traccar, %s", error)
    except aiohttp.ClientError as error:
        self._authenticated, self._connected = False, False
        if not test:
            _LOGGER.warning("Error connecting to Traccar, %s", error)
    except socket.gaierror as error:
        self._authenticated, self._connected = False, False
        if not test:
            _LOGGER.warning("Error connecting to Traccar, %s", error)
    except TypeError as error:
        self._authenticated, self._connected = False, False
        if not test:
            _LOGGER.warning("Error connecting to Traccar, %s", error)
    except Exception as error:
        # Broad catch-all keeps the client from ever raising to callers.
        self._authenticated, self._connected = False, False
        if not test:
            _LOGGER.warning("Error connecting to Traccar, %s", error)
    return data
Communicate with the API.
46,810
async def runcli():
    """Interactive debug CLI for pytraccar.

    Prompts for connection details, tests the connection, and when
    authenticated dumps device/geofence/position info to stdout.
    """
    async with aiohttp.ClientSession() as session:
        host = input("IP: ")
        username = input("Username: ")
        password = input("Password: ")
        print("\n\n\n")
        data = API(LOOP, session, username, password, host)
        await data.test_connection()
        print("Authenticated:", data.authenticated)
        if data.authenticated:
            await data.get_device_info()
            print("Authentication:", data.authenticated)
            print("Geofences:", data.geofences)
            print("Devices:", data.devices)
            print("Positions:", data.positions)
            print("Device info:", data.device_info)
Debug of pytraccar .
46,811
def restore(s, t):
    """Rebuild *s*'s layout using characters from *t*.

    Black-square characters of *s* are kept in place; every other
    position is filled with the next character of *t*. *t* is shorter
    than *s* by exactly the number of black squares in *s*.
    """
    fill = iter(t)
    return ''.join(c if is_blacksquare(c) else next(fill) for c in s)
s is the source string; it can contain '.' characters. t is the target; it is smaller than s by the number of '.' characters in s.
46,812
def default(event, data):
    """The default handler: log basic event info.

    Maps event status to a pirate-themed message (falling back to
    'Avast:') and logs status/id at info level plus the full payload at
    debug level.
    """
    messages = defaultdict(lambda: 'Avast:')
    messages['start'] = 'Thar she blows!'
    messages['tag'] = 'Thar she blows!'
    messages['stop'] = 'Away into the depths:'
    messages['destroy'] = 'Away into the depths:'
    messages['delete'] = 'Away into the depths:'
    status = get_status(event)
    message = messages[status] + ' %s/%s'
    # Lazy %-style args keep formatting out of the hot path.
    log.info(message, status, get_id(event))
    log.debug('"data": %s', form_json(data))
The default handler prints basic event info .
46,813
def table(tab):
    """Access IPTables transactionally in a uniform way.

    Generator-based context: reuses an already-open table when nested,
    otherwise opens it with autocommit off, yields it, then commits and
    closes once the outermost user finishes.
    """
    global open_tables
    if tab in open_tables:
        # Nested use: hand back the existing transaction.
        yield open_tables[tab]
    else:
        open_tables[tab] = iptc.Table(tab)
        open_tables[tab].refresh()
        open_tables[tab].autocommit = False
        yield open_tables[tab]
        # Outermost user done: flush and drop the handle.
        open_tables[tab].commit()
        del open_tables[tab]
Access IPTables transactionally in a uniform way .
46,814
def format_symbol(self, symbol, link_resolver):
    """Format a symbols.Symbol, returning the wrapped HTML string.

    Falsy symbols and FieldSymbol instances render as the empty string.
    """
    if not symbol or isinstance(symbol, FieldSymbol):
        return ''
    formatted = self._format_symbol(symbol)
    wrapper = self.get_template('symbol_wrapper.html')
    return wrapper.render({'symbol': symbol, 'formatted_doc': formatted})
Format a symbols.Symbol
46,815
def add_comment(self, comment):
    """Add a comment to the database and notify listeners.

    Falsy comments are ignored.
    """
    if not comment:
        return
    self.__comments[comment.name] = comment
    # Signal objects are called with (database, comment).
    self.comment_added_signal(self, comment)
Add a comment to the database .
46,816
def touch(fname):
    """Mimic the touch command.

    Busy-waits, re-touching, until the observed mtime actually changes —
    guards against coarse filesystem timestamp granularity where a
    single touch may not alter the stored mtime.
    """
    orig_mtime = get_mtime(fname)
    while get_mtime(fname) == orig_mtime:
        pathlib.Path(fname).touch()
Mimics the touch command
46,817
def debug(self, message, domain=None):
    """Shortcut for utils.loggable.debug, defaulting the domain to
    this extension's name."""
    effective_domain = self.extension_name if domain is None else domain
    debug(message, effective_domain)
Shortcut function for utils . loggable . debug
46,818
def info(self, message, domain=None):
    """Shortcut for utils.loggable.info, defaulting the domain to
    this extension's name."""
    effective_domain = self.extension_name if domain is None else domain
    info(message, effective_domain)
Shortcut function for utils . loggable . info
46,819
def parse_config(self, config):
    """Parse this extension's configuration.

    Subclasses overriding this should chain up first. Reads sources,
    index and source roots, then applies all registered path/paths
    arguments onto self, and finally forwards the config to the
    formatter.
    """
    prefix = self.argument_prefix
    self.sources = config.get_sources(prefix)
    self.smart_sources = [self._get_smart_filename(s) for s in self.sources]
    self.index = config.get_index(prefix)
    self.source_roots = OrderedSet(config.get_paths('%s_source_roots' % prefix))
    # Registered multi-path arguments.
    for arg, dest in list(self.paths_arguments.items()):
        val = config.get_paths(arg)
        setattr(self, dest, val)
    # Registered single-path arguments.
    for arg, dest in list(self.path_arguments.items()):
        val = config.get_path(arg)
        setattr(self, dest, val)
    self.formatter.parse_config(config)
Override this making sure to chain up first if your extension adds its own custom command line arguments or you want to do any further processing on the automatically added arguments .
46,820
def add_attrs(self, symbol, **kwargs):
    """Set extension attributes on *symbol*, one per keyword argument,
    under this extension's name."""
    for attr_name, attr_value in kwargs.items():
        symbol.add_extension_attribute(self.extension_name, attr_name, attr_value)
Helper for setting symbol extension attributes
46,821
def get_attr(self, symbol, attrname):
    """Return the named extension attribute from *symbol* for this
    extension, or None when absent."""
    own_attrs = symbol.extension_attributes.get(self.extension_name, {})
    return own_attrs.get(attrname, None)
Helper for getting symbol extension attributes
46,822
def add_index_argument(cls, group):
    """Subclasses may call this to add an index argument."""
    prefix = cls.argument_prefix
    help_text = "Name of the %s root markdown file, can be None" % (cls.extension_name)
    group.add_argument('--%s-index' % prefix, action="store",
                       dest="%s_index" % prefix, help=help_text)
Subclasses may call this to add an index argument .
46,823
def add_sources_argument(cls, group, allow_filters=True, prefix=None, add_root_paths=False):
    """Subclasses may call this to add sources and source_filters arguments.

    :param allow_filters: also expose --<prefix>-source-filters
    :param add_root_paths: also expose --<prefix>-source-roots
    """
    prefix = prefix or cls.argument_prefix
    dest_prefix = prefix.replace('-', '_')
    group.add_argument("--%s-sources" % prefix, action="store", nargs="+",
                       dest="%s_sources" % dest_prefix,
                       help="%s source files to parse" % prefix)
    if allow_filters:
        group.add_argument("--%s-source-filters" % prefix, action="store",
                           nargs="+", dest="%s_source_filters" % dest_prefix,
                           help="%s source files to ignore" % prefix)
    if add_root_paths:
        group.add_argument("--%s-source-roots" % prefix, action="store",
                           nargs="+", dest="%s_source_roots" % dest_prefix,
                           help="%s source root directories allowing files to be referenced relatively to those" % prefix)
Subclasses may call this to add sources and source_filters arguments .
46,824
def add_path_argument(cls, group, argname, dest=None, help_=None):
    """Subclasses may call this to expose a path argument.

    Registers the (dest, final_dest) pair in cls.path_arguments so
    parse_config can later apply the configured value.
    """
    prefixed = '%s-%s' % (cls.argument_prefix, argname)
    if dest is None:
        # Derive dest from the flag; strip the prefix for the attribute name.
        dest = prefixed.replace('-', '_')
        final = dest[len(cls.argument_prefix) + 1:]
    else:
        final = dest
        dest = '%s_%s' % (cls.argument_prefix, dest)
    group.add_argument('--%s' % prefixed, action='store', dest=dest, help=help_)
    cls.path_arguments[dest] = final
Subclasses may call this to expose a path argument .
46,825
def add_paths_argument(cls, group, argname, dest=None, help_=None):
    """Subclasses may call this to expose a multi-path argument.

    Same mechanics as add_path_argument but accepts one or more values
    (nargs='+') and records into cls.paths_arguments.
    """
    prefixed = '%s-%s' % (cls.argument_prefix, argname)
    if dest is None:
        dest = prefixed.replace('-', '_')
        final = dest[len(cls.argument_prefix) + 1:]
    else:
        final = dest
        dest = '%s_%s' % (cls.argument_prefix, dest)
    group.add_argument('--%s' % prefixed, action='store', nargs='+',
                       dest=dest, help=help_)
    cls.paths_arguments[dest] = final
Subclasses may call this to expose a paths argument .
46,826
def create_symbol(self, *args, **kwargs):
    """Create a symbol through the application database.

    Extensions that discover symbols should use this method: it fills
    in the project name when missing and records created symbols per
    filename so a naive index can be generated later.
    """
    if not kwargs.get('project_name'):
        kwargs['project_name'] = self.project.project_name
    sym = self.app.database.create_symbol(*args, **kwargs)
    if sym:
        # Exact-type check: bare Symbol instances are deliberately not
        # indexed, only concrete subclasses — TODO confirm intent.
        if type(sym) != Symbol:
            self._created_symbols[sym.filename].add(sym.unique_name)
    return sym
Extensions that discover and create instances of symbols . Symbol should do this through this method as it will keep an index of these which can be used when generating a naive index .
46,827
def format_page(self, page, link_resolver, output):
    """Format one page; called by project.Project.format_page.

    When *output* is set, HTML goes into an 'html' subdirectory
    (created on demand); a falsy output formats without writing.
    """
    debug('Formatting page %s' % page.link.ref, 'formatting')
    if output:
        actual_output = os.path.join(output, 'html')
        if not os.path.exists(actual_output):
            os.makedirs(actual_output)
    else:
        actual_output = None
    page.format(self.formatter, link_resolver, actual_output)
Called by project . Project . format_page to leave full control to extensions over the formatting of the pages they are responsible of .
46,828
def add_subproject(self, fname, conf_path):
    """Create and register a new subproject from its config file.

    :param fname: key under which the subproject is stored
    :param conf_path: path to the subproject's configuration file
    """
    config = Config(conf_file=conf_path)
    proj = Project(self.app, dependency_map=self.dependency_map)
    proj.parse_name_from_config(config)
    proj.parse_config(config)
    proj.setup()
    self.subprojects[fname] = proj
Creates and adds a new subproject .
46,829
def _no_duplicates_constructor(loader, node, deep=False):
    """YAML mapping constructor that rejects duplicate keys.

    First pass constructs every key/value only to detect duplicates;
    the actual mapping is then built by the loader's default
    construct_mapping.
    """
    mapping = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        value = loader.construct_object(value_node, deep=deep)
        if key in mapping:
            raise ConstructorError("while constructing a mapping",
                                   node.start_mark,
                                   "found duplicate key (%s)" % key,
                                   key_node.start_mark)
        mapping[key] = value
    return loader.construct_mapping(node, deep)
Check for duplicate keys .
46,830
def resolve_symbols(self, tree, database, link_resolver):
    """Resolve this page's symbol names into actual symbols.

    Queries each name from *database*, resolves and (optionally)
    sorts the symbols, and derives a page title/comment from the first
    class-like symbol found.
    """
    self.typed_symbols = self.__get_empty_typed_symbols()
    all_syms = OrderedSet()
    for sym_name in self.symbol_names:
        sym = database.get_symbol(sym_name)
        # May add extra implicitly-related symbols to all_syms.
        self.__query_extra_symbols(sym, all_syms, tree, link_resolver, database)
    if tree.project.is_toplevel:
        page_path = self.link.ref
    else:
        page_path = self.project_name + '/' + self.link.ref
    if self.meta.get("auto-sort", True):
        all_syms = sorted(all_syms, key=lambda x: x.unique_name)
    for sym in all_syms:
        sym.update_children_comments()
        self.__resolve_symbol(sym, link_resolver, page_path)
        self.symbol_names.add(sym.unique_name)
    # Move the parent-less bucket to the end of the ordered dict.
    no_parent_syms = self.by_parent_symbols.pop(None, None)
    if no_parent_syms:
        self.by_parent_symbols[None] = no_parent_syms
    for sym_type in [ClassSymbol, AliasSymbol, InterfaceSymbol, StructSymbol]:
        syms = self.typed_symbols[sym_type].symbols
        if not syms:
            continue
        # First class-like symbol names the page.
        if self.title is None:
            self.title = syms[0].display_name
        if self.comment is None:
            self.comment = Comment(name=self.name)
            self.comment.short_description = syms[0].comment.short_description
            self.comment.title = syms[0].comment.title
        break
When this method is called the page s symbol names are queried from database and added to lists of actual symbols sorted by symbol class .
46,831
def walk(self, parent=None):
    """Generator yielding pages in infix order, starting at *parent*
    (the root when omitted)."""
    if parent is None:
        yield self.root
        parent = self.root
    for child_name in parent.subpages:
        child = self.__all_pages[child_name]
        yield child
        for descendant in self.walk(parent=child):
            yield descendant
Generator that yields pages in infix order
46,832
def get_extension_classes():
    """Hotdoc's setuptools entry point: list available extension classes.

    Optional extensions (DBus, C, GI) are appended only when their
    dependencies import successfully.
    """
    res = [SyntaxHighlightingExtension, SearchExtension, TagExtension,
           DevhelpExtension, LicenseExtension, GitUploadExtension,
           EditOnGitHubExtension]
    # NOTE(review): checks only the minor version — assumes Python 3.x.
    if sys.version_info[1] >= 5:
        res += [DBusExtension]
    try:
        from hotdoc.extensions.c.c_extension import CExtension
        res += [CExtension]
    except ImportError:
        pass
    try:
        from hotdoc.extensions.gi.gi_extension import GIExtension
        res += [GIExtension]
    except ImportError:
        pass
    return res
Hotdoc's setuptools entry point
46,833
def register_functions(lib, ignore_errors):
    """Register function prototypes with a libclang library instance."""
    for proto in functionList:
        register_function(lib, proto, ignore_errors)
Register function prototypes with a libclang library instance .
46,834
def from_offset(tu, file, offset):
    """Retrieve a SourceLocation from a given character offset.

    :param tu: translation unit in which the location resides
    :param file: file containing the location
    :param offset: 0-based character offset within the file
    """
    location = conf.lib.clang_getLocationForOffset(tu, file, offset)
    return location
Retrieve a SourceLocation from a given character offset .
46,835
def get_tokens(tu, extent):
    """Helper to yield all tokens in an extent.

    Copies each libclang token into a Python Token bound to a shared
    TokenGroup, which owns the underlying memory and frees it when the
    last token is garbage collected.
    """
    tokens_memory = POINTER(Token)()
    tokens_count = c_uint()
    conf.lib.clang_tokenize(tu, extent, byref(tokens_memory), byref(tokens_count))
    count = int(tokens_count.value)
    if count < 1:
        return
    tokens_array = cast(tokens_memory, POINTER(Token * count)).contents
    # The group keeps the token buffer alive for all yielded tokens.
    token_group = TokenGroup(tu, tokens_memory, tokens_count)
    for i in xrange(0, count):
        token = Token()
        token.int_data = tokens_array[i].int_data
        token.ptr_data = tokens_array[i].ptr_data
        token._tu = tu
        token._group = token_group
        yield token
Helper method to return all tokens in an extent .
46,836
def from_value(value):
    """Obtain a registered TokenKind instance from its value.

    :raises ValueError: when no TokenKind is registered for *value*.
    """
    kind = TokenKind._value_map.get(value, None)
    if kind is None:
        raise ValueError('Unknown TokenKind: %d' % value)
    return kind
Obtain a registered TokenKind instance from its value .
46,837
def register(value, name):
    """Register a new TokenKind enumeration value.

    :raises ValueError: when *value* is already registered.
    """
    if value in TokenKind._value_map:
        raise ValueError('TokenKind already registered: %d' % value)
    new_kind = TokenKind(value, name)
    TokenKind._value_map[value] = new_kind
    # Also expose the kind as an attribute on the class.
    setattr(TokenKind, name, new_kind)
Register a new TokenKind enumeration .
46,838
def canonical(self):
    """Return the canonical Cursor corresponding to this Cursor.

    Result is computed once and cached on the instance.
    """
    try:
        return self._canonical
    except AttributeError:
        self._canonical = conf.lib.clang_getCanonicalCursor(self)
        return self._canonical
Return the canonical Cursor corresponding to this Cursor .
46,839
def result_type(self):
    """Retrieve the Type of the result for this Cursor, cached on the
    instance after first computation."""
    try:
        return self._result_type
    except AttributeError:
        self._result_type = conf.lib.clang_getResultType(self.type)
        return self._result_type
Retrieve the Type of the result for this Cursor .
46,840
def underlying_typedef_type(self):
    """Return the underlying type of a typedef declaration.

    Only valid on declaration cursors; result is cached.
    """
    try:
        return self._underlying_type
    except AttributeError:
        assert self.kind.is_declaration()
        self._underlying_type = conf.lib.clang_getTypedefDeclUnderlyingType(self)
        return self._underlying_type
Return the underlying type of a typedef declaration .
46,841
def enum_type(self):
    """Return the integer type of an enum declaration.

    Only valid on ENUM_DECL cursors; result is cached.
    """
    try:
        return self._enum_type
    except AttributeError:
        assert self.kind == CursorKind.ENUM_DECL
        self._enum_type = conf.lib.clang_getEnumDeclIntegerType(self)
        return self._enum_type
Return the integer type of an enum declaration .
46,842
def enum_value(self):
    """Return the value of an enum constant.

    Picks the signed or unsigned libclang accessor depending on the
    underlying integer type of the enum; result is cached.
    """
    if not hasattr(self, '_enum_value'):
        assert self.kind == CursorKind.ENUM_CONSTANT_DECL
        # Figure out the underlying type of the enum to know if it
        # is a signed or unsigned quantity.
        underlying_type = self.type
        if underlying_type.kind == TypeKind.ENUM:
            underlying_type = underlying_type.get_declaration().enum_type
        if underlying_type.kind in (TypeKind.CHAR_U,
                                    TypeKind.UCHAR,
                                    TypeKind.CHAR16,
                                    TypeKind.CHAR32,
                                    TypeKind.USHORT,
                                    TypeKind.UINT,
                                    TypeKind.ULONG,
                                    TypeKind.ULONGLONG,
                                    TypeKind.UINT128):
            self._enum_value = conf.lib.clang_getEnumConstantDeclUnsignedValue(self)
        else:
            self._enum_value = conf.lib.clang_getEnumConstantDeclValue(self)
    return self._enum_value
Return the value of an enum constant .
46,843
def hash(self):
    """Return a hash of the cursor as an int, cached on the instance."""
    try:
        return self._hash
    except AttributeError:
        self._hash = conf.lib.clang_hashCursor(self)
        return self._hash
Returns a hash of the cursor as an int .
46,844
def semantic_parent(self):
    """Return the semantic parent for this cursor, cached on the
    instance."""
    try:
        return self._semantic_parent
    except AttributeError:
        self._semantic_parent = conf.lib.clang_getCursorSemanticParent(self)
        return self._semantic_parent
Return the semantic parent for this cursor .
46,845
def lexical_parent(self):
    """Return the lexical parent for this cursor, cached on the
    instance."""
    try:
        return self._lexical_parent
    except AttributeError:
        self._lexical_parent = conf.lib.clang_getCursorLexicalParent(self)
        return self._lexical_parent
Return the lexical parent for this cursor .
46,846
def referenced(self):
    """For a reference cursor, return a cursor for the referenced
    entity; cached on the instance."""
    try:
        return self._referenced
    except AttributeError:
        self._referenced = conf.lib.clang_getCursorReferenced(self)
        return self._referenced
For a cursor that is a reference returns a cursor representing the entity that it references .
46,847
def brief_comment(self):
    """Return the brief comment text associated with this Cursor, or
    None when there is none."""
    text = conf.lib.clang_Cursor_getBriefCommentText(self)
    return str(text) if text else None
Returns the brief comment text associated with that Cursor
46,848
def raw_comment(self):
    """Return the raw comment text associated with this Cursor, or
    None when there is none."""
    text = conf.lib.clang_Cursor_getRawCommentText(self)
    return str(text) if text else None
Returns the raw comment text associated with that Cursor
46,849
def get_arguments(self):
    """Return an iterator over the arguments of this cursor."""
    for index in xrange(conf.lib.clang_Cursor_getNumArguments(self)):
        yield conf.lib.clang_Cursor_getArgument(self, index)
Return an iterator for accessing the arguments of this cursor .
46,850
def get_children(self):
    """Return an iterator for accessing the children of this cursor.

    Collects children eagerly through a libclang visitor callback,
    binding each child to this cursor's translation unit.
    """
    # called back by libclang for each direct child
    def visitor(child, parent, children):
        # FIXME: Document this assertion in API.
        assert child != conf.lib.clang_getNullCursor()
        # Create reference to TU so it isn't GC'd before Cursor.
        child._tu = self._tu
        children.append(child)
        return 1  # continue visiting siblings
    children = []
    conf.lib.clang_visitChildren(self, callbacks['cursor_visit'](visitor), children)
    return iter(children)
Return an iterator for accessing the children of this cursor .
46,851
def walk_preorder(self):
    """Depth-first preorder walk over the cursor and its descendants.

    Yields this cursor first, then recursively every descendant.
    """
    yield self
    for child in self.get_children():
        for node in child.walk_preorder():
            yield node
Depth - first preorder walk over the cursor and its descendants .
46,852
def is_anonymous(self):
    """Check whether the record is anonymous.

    Field declarations are answered via their declared type's record.
    """
    if self.kind != CursorKind.FIELD_DECL:
        return conf.lib.clang_Cursor_isAnonymous(self)
    return self.type.get_declaration().is_anonymous()
Check if the record is anonymous .
46,853
def argument_types(self):
    """Retrieve a container for the non-variadic arguments of this type.

    Only valid on FUNCTIONPROTO types. Returns a lazy Sequence whose
    length and items are fetched from libclang on demand.
    """
    # NOTE(review): collections.Sequence is removed in Python 3.10+;
    # this file targets the older collections alias.
    class ArgumentsIterator(collections.Sequence):
        def __init__(self, parent):
            self.parent = parent
            self.length = None

        def __len__(self):
            # Cache the count after the first query.
            if self.length is None:
                self.length = conf.lib.clang_getNumArgTypes(self.parent)
            return self.length

        def __getitem__(self, key):
            # FIXME Support slice objects.
            if not isinstance(key, int):
                raise TypeError("Must supply a non-negative int.")
            if key < 0:
                raise IndexError("Only non-negative indexes are accepted.")
            if key >= len(self):
                raise IndexError("Index greater than container length: "
                                 "%d > %d" % (key, len(self)))
            result = conf.lib.clang_getArgType(self.parent, key)
            if result.kind == TypeKind.INVALID:
                raise IndexError("Argument could not be retrieved.")
            return result

    assert self.kind == TypeKind.FUNCTIONPROTO
    return ArgumentsIterator(self)
Retrieve a container for the non - variadic arguments for this type .
46,854
def element_type(self):
    """Retrieve the Type of elements within this Type.

    :raises Exception: when the type has no element type.
    """
    element = conf.lib.clang_getElementType(self)
    if element.kind == TypeKind.INVALID:
        raise Exception('Element type not available on this type.')
    return element
Retrieve the Type of elements within this Type .
46,855
def element_count(self):
    """Retrieve the number of elements in this type.

    :raises Exception: when the type does not have elements.
    """
    count = conf.lib.clang_getNumElements(self)
    if count < 0:
        raise Exception('Type does not have elements.')
    return count
Retrieve the number of elements in this type .
46,856
def is_function_variadic(self):
    """Determine whether this function Type is a variadic function type.

    Only valid on FUNCTIONPROTO types.
    """
    assert self.kind == TypeKind.FUNCTIONPROTO
    variadic = conf.lib.clang_isFunctionTypeVariadic(self)
    return variadic
Determine whether this function Type is a variadic function type .
46,857
def get_fields(self):
    """Return an iterator for accessing the fields of this type.

    Collects fields eagerly through a libclang visitor callback,
    binding each field to this type's translation unit.
    """
    def visitor(field, children):
        assert field != conf.lib.clang_getNullCursor()
        # Create reference to TU so it isn't GC'd before the cursor.
        field._tu = self._tu
        fields.append(field)
        return 1  # continue visiting
    fields = []
    conf.lib.clang_Type_visitFields(self, callbacks['fields_visit'](visitor), fields)
    return iter(fields)
Return an iterator for accessing the fields of this type .
46,858
def parse(self, path, args=None, unsaved_files=None, options=0):
    """Load the translation unit from *path* by running clang and
    generating the AST before loading. Additional command line
    parameters can be passed to clang via *args*.
    """
    tu = TranslationUnit.from_source(path, args, unsaved_files, options, self)
    return tu
Load the translation unit from the given source code file by running clang and generating the AST before loading . Additional command line parameters can be passed to clang via the args parameter .
46,859
def from_ast_file(cls, filename, index=None):
    """Create a TranslationUnit instance from a saved AST file.

    :raises TranslationUnitLoadError: when the file cannot be loaded.
    """
    if index is None:
        index = Index.create()
    tu_ptr = conf.lib.clang_createTranslationUnit(index, filename)
    if not tu_ptr:
        raise TranslationUnitLoadError(filename)
    return cls(ptr=tu_ptr, index=index)
Create a TranslationUnit instance from a saved AST file .
46,860
def get_includes(self):
    """Return an iterable of FileInclusion objects describing the
    sequence of inclusions in this translation unit.

    The depth-0 entry (the input file itself) is skipped; inclusions
    made through precompiled headers are not recursed into.
    """
    def visitor(fobj, lptr, depth, includes):
        if depth > 0:
            loc = lptr.contents
            includes.append(FileInclusion(loc.file, File(fobj), loc, depth))
    # Automatically adapt CIndex/ctype pointers to their equivalent python
    # objects.
    includes = []
    conf.lib.clang_getInclusions(self, callbacks['translation_unit_includes'](visitor), includes)
    return iter(includes)
Return an iterable sequence of FileInclusion objects that describe the sequence of inclusions in a translation unit . The first object in this sequence is always the input file . Note that this method will not recursively iterate over header files included through precompiled headers .
46,861
def get_location(self, filename, position):
    """Obtain a SourceLocation for a file in this translation unit.

    *position* is either an int character offset or a (line, column)
    pair.
    """
    file_obj = self.get_file(filename)
    if isinstance(position, int):
        return SourceLocation.from_offset(self, file_obj, position)
    line, column = position[0], position[1]
    return SourceLocation.from_position(self, file_obj, line, column)
Obtain a SourceLocation for a file in this translation unit .
46,862
def get_extent(self, filename, locations):
    """Obtain a SourceRange from this translation unit.

    *locations* holds two endpoints, each either an int offset or a
    (line, column) pair; mixed forms are accepted.
    """
    f = self.get_file(filename)
    if len(locations) < 2:
        raise Exception('Must pass object with at least 2 elements')
    start_location, end_location = locations
    # Each endpoint may be a (line, column) sequence or an int offset.
    if hasattr(start_location, '__len__'):
        start_location = SourceLocation.from_position(self, f, start_location[0], start_location[1])
    elif isinstance(start_location, int):
        start_location = SourceLocation.from_offset(self, f, start_location)
    if hasattr(end_location, '__len__'):
        end_location = SourceLocation.from_position(self, f, end_location[0], end_location[1])
    elif isinstance(end_location, int):
        end_location = SourceLocation.from_offset(self, f, end_location)
    assert isinstance(start_location, SourceLocation)
    assert isinstance(end_location, SourceLocation)
    return SourceRange.from_locations(start_location, end_location)
Obtain a SourceRange from this translation unit .
46,863
def reparse(self, unsaved_files=None, options=0):
    """Reparse an already parsed translation unit.

    :param unsaved_files: sequence of (name, contents-or-file-like)
        pairs overriding on-disk contents
    :param options: bitmask of reparse options passed to libclang
    :raises TypeError: when an unsaved file's contents are not a string
        and cannot be read into one
    """
    if unsaved_files is None:
        unsaved_files = []
    unsaved_files_array = 0
    if len(unsaved_files):
        unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
        for i, (name, value) in enumerate(unsaved_files):
            if not isinstance(value, str):
                # Read file-like objects into a string.
                value = value.read()
                # Bug fix: removed stray debug print(value) that dumped
                # entire file contents to stdout.
            if not isinstance(value, str):
                raise TypeError('Unexpected unsaved file contents.')
            unsaved_files_array[i].name = name
            unsaved_files_array[i].contents = value
            unsaved_files_array[i].length = len(value)
    ptr = conf.lib.clang_reparseTranslationUnit(self, len(unsaved_files), unsaved_files_array, options)
Reparse an already parsed translation unit .
46,864
def save(self, filename):
    """Saves the TranslationUnit to a file.

    :raises TranslationUnitSaveError: when libclang reports a failure.
    """
    options = conf.lib.clang_defaultSaveOptions(self)
    status = int(conf.lib.clang_saveTranslationUnit(self, filename, options))
    if status != 0:
        raise TranslationUnitSaveError(status, 'Error saving TranslationUnit.')
Saves the TranslationUnit to a file .
46,865
def codeComplete(self, path, line, column, unsaved_files=None,
                 include_macros=False, include_code_patterns=False,
                 include_brief_comments=False):
    """Code complete in this translation unit.

    :param path: file in which to complete
    :param line, column: 1-based completion position
    :param unsaved_files: sequence of (name, contents-or-file-like)
        pairs overriding on-disk contents
    :returns: CodeCompletionResults, or None on failure
    """
    options = 0
    if include_macros:
        options += 1
    if include_code_patterns:
        options += 2
    if include_brief_comments:
        options += 4
    if unsaved_files is None:
        unsaved_files = []
    unsaved_files_array = 0
    if len(unsaved_files):
        unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
        for i, (name, value) in enumerate(unsaved_files):
            if not isinstance(value, str):
                # Read file-like objects into a string.
                value = value.read()
                # Bug fix: removed stray debug print(value) that dumped
                # entire file contents to stdout.
            if not isinstance(value, str):
                raise TypeError('Unexpected unsaved file contents.')
            unsaved_files_array[i].name = c_string_p(name)
            unsaved_files_array[i].contents = c_string_p(value)
            unsaved_files_array[i].length = len(value)
    ptr = conf.lib.clang_codeCompleteAt(self, path, line, column, unsaved_files_array, len(unsaved_files), options)
    if ptr:
        return CodeCompletionResults(ptr)
    return None
Code complete in this translation unit .
46,866
def get_tokens(self, locations=None, extent=None):
    """Obtain tokens in this translation unit.

    Either pass a pre-built *extent* or a 2-element *locations*
    sequence from which an extent is constructed.
    """
    if locations is not None:
        extent = SourceRange(start=locations[0], end=locations[1])
    return TokenGroup.get_tokens(self, extent)
Obtain tokens in this translation unit .
46,867
def name(self):
    """Return the complete file and path name of the file."""
    cxstring = conf.lib.clang_getFileName(self)
    return str(conf.lib.clang_getCString(cxstring))
Return the complete file and path name of the file .
46,868
def arguments(self):
    """Yield each argument of the compiler invocation's command line
    as a string."""
    count = conf.lib.clang_CompileCommand_getNumArgs(self.cmd)
    for i in xrange(count):
        yield str(conf.lib.clang_CompileCommand_getArg(self.cmd, i))
Get an iterable object providing each argument in the command line for the compiler invocation as a _CXString .
46,869
def fromDirectory(buildDir):
    """Build a CompilationDatabase from the database found in buildDir."""
    errorCode = c_uint()
    try:
        cdb = conf.lib.clang_CompilationDatabase_fromDirectory(buildDir, byref(errorCode))
    except CompilationDatabaseError as e:
        # NOTE(review): the caught exception's detail is discarded and
        # replaced by the libclang error code — confirm intended.
        raise CompilationDatabaseError(int(errorCode.value), "CompilationDatabase loading failed")
    return cdb
Builds a CompilationDatabase from the database found in buildDir
46,870
def get_klass_parents(gi_name):
    """Return a sorted list of qualified symbols representing the
    parents of the klass-like symbol named *gi_name*."""
    res = []
    parents = __HIERARCHY_GRAPH.predecessors(gi_name)
    if not parents:
        return []
    # Walks up the hierarchy, filling res in order.
    __get_parent_link_recurse(parents[0], res)
    return res
Returns a sorted list of qualified symbols representing the parents of the klass - like symbol named gi_name
46,871
def get_klass_children(gi_name):
    """Return a dict of qualified symbols representing the children of
    the klass-like symbol named *gi_name*, keyed by C type name."""
    res = {}
    children = __HIERARCHY_GRAPH.successors(gi_name)
    for gi_name in children:
        ctype_name = ALL_GI_TYPES[gi_name]
        qs = QualifiedSymbol(type_tokens=[Link(None, ctype_name, ctype_name)])
        qs.add_extension_attribute('gi-extension', 'type_desc',
                                   SymbolTypeDesc([], gi_name, ctype_name, 0))
        res[ctype_name] = qs
    return res
Returns a dict of qualified symbols representing the children of the klass - like symbol named gi_name
46,872
def cache_nodes(gir_root, all_girs):
    """Identify and store all the gir symbols the symbols we will
    document may link to, or be typed with.

    Walks the gir tree rooted at gir_root, registering name translations
    for every c:identifier, C/GLib type, field, property, signal and
    virtual method found, then recursively parses the gir files this one
    includes (located through all_girs).
    """
    ns_node = gir_root.find('./{%s}namespace' % NS_MAP['core'])
    id_prefixes = ns_node.attrib['{%s}identifier-prefixes' % NS_MAP['c']]
    sym_prefixes = ns_node.attrib['{%s}symbol-prefixes' % NS_MAP['c']]

    # Register every node carrying a c:identifier attribute.
    id_key = '{%s}identifier' % NS_MAP['c']
    for node in gir_root.xpath('.//*[@c:identifier]', namespaces=NS_MAP):
        make_translations(node.attrib[id_key], node)

    id_type = c_ns('type')
    glib_type = glib_ns('type-name')
    class_tag = core_ns('class')
    callback_tag = core_ns('callback')
    interface_tag = core_ns('interface')

    # Every typed node, excluding core:type / core:array wrapper nodes.
    for node in gir_root.xpath('.//*[not(self::core:type) and not (self::core:array)][@c:type or @glib:type-name]', namespaces=NS_MAP):
        try:
            name = node.attrib[id_type]
        except KeyError:
            # Fall back to the GLib type name when no C type is given.
            name = node.attrib[glib_type]

        make_translations(name, node)
        gi_name = '.'.join(get_gi_name_components(node))
        ALL_GI_TYPES[gi_name] = get_klass_name(node)

        if node.tag in (class_tag, interface_tag):
            # Classes and interfaces enter the hierarchy graph, and get
            # a Klass::Klass translation plus smart-filter generation.
            __update_hierarchies(ns_node.attrib.get('name'), node, gi_name)
            make_translations('%s::%s' % (name, name), node)
            __generate_smart_filters(id_prefixes, sym_prefixes, node)
        elif node.tag in (callback_tag,):
            ALL_CALLBACK_TYPES.add(node.attrib[c_ns('type')])

    for field in gir_root.xpath('.//self::core:field', namespaces=NS_MAP):
        unique_name = get_field_c_name(field)
        make_translations(unique_name, field)

    # Properties are named Klass:prop-name.
    for node in gir_root.xpath('.//core:property', namespaces=NS_MAP):
        name = '%s:%s' % (get_klass_name(node.getparent()),
                          node.attrib['name'])
        make_translations(name, node)

    # Signals are named Klass::signal-name.
    for node in gir_root.xpath('.//glib:signal', namespaces=NS_MAP):
        name = '%s::%s' % (get_klass_name(node.getparent()),
                           node.attrib['name'])
        make_translations(name, node)

    for node in gir_root.xpath('.//core:virtual-method', namespaces=NS_MAP):
        name = get_symbol_names(node)[0]
        make_translations(name, node)

    # Recurse into included gir files, parsing each file only once.
    for inc in gir_root.findall('./core:include', namespaces=NS_MAP):
        inc_name = inc.attrib["name"]
        inc_version = inc.attrib["version"]
        gir_file = __find_gir_file('%s-%s.gir' % (inc_name, inc_version),
                                   all_girs)
        if not gir_file:
            warn('missing-gir-include',
                 "Couldn't find a gir for %s-%s.gir" % (inc_name, inc_version))
            continue
        if gir_file in __PARSED_GIRS:
            continue
        __PARSED_GIRS.add(gir_file)
        inc_gir_root = etree.parse(gir_file).getroot()
        cache_nodes(inc_gir_root, all_girs)
Identify and store all the gir symbols the symbols we will document may link to or be typed with
46,873
def type_description_from_node(gi_node):
    """Parse a typed node, return a usable description."""
    ctype_name, gi_name, array_nesting = unnest_type(gi_node)
    cur_ns = get_namespace(gi_node)

    if ctype_name is None:
        type_tokens = __type_tokens_from_gitype(cur_ns, gi_name)
    else:
        type_tokens = __type_tokens_from_cdecl(ctype_name)

    # Prefer the namespaced name when it is a known GI type.
    namespaced = '%s.%s' % (cur_ns, gi_name)
    if namespaced in ALL_GI_TYPES:
        gi_name = namespaced

    return SymbolTypeDesc(type_tokens, gi_name, ctype_name, array_nesting)
Parse a typed node returns a usable description
46,874
def is_introspectable(name, language):
    """Check whether name is introspectable for language.

    Do not call this before caching the nodes.
    """
    return (name in FUNDAMENTALS[language] or
            name in __TRANSLATED_NAMES[language])
Do not call this before caching the nodes
46,875
def get_markdown_files(self, dir_):
    """Get all the markdown files in a folder, recursively.

    Args:
        dir_: str, the toplevel folder to walk.

    Returns:
        OrderedSet: paths of all files under dir_ whose extension is
        .markdown, .md or .yaml.
    """
    md_files = OrderedSet()
    for root, _, files in os.walk(dir_):
        for name in files:
            # FIX: os.path.splitext always returns a 2-tuple, so the
            # previous `len(split) == 1` guard was dead code.
            ext = os.path.splitext(name)[1]
            if ext in ('.markdown', '.md', '.yaml'):
                md_files.add(os.path.join(root, name))
    return md_files
Get all the markdown files in a folder recursively
46,876
def get(self, key, default=None):
    """Get the value for key.

    Lookup order: command line, then configuration file, then defaults;
    returns default when the key is found nowhere.
    """
    for store in (self.__cli, self.__config, self.__defaults):
        if key in store:
            return store[key]
    return default
Get the value for key .
46,877
def get_index(self, prefix=''):
    """Retrieve the absolute path to an index, according to prefix."""
    prefixed = '%s_index' % prefix if prefix else 'index'

    # A truthy command-line value wins over the configuration file.
    if prefixed in self.__cli and self.__cli[prefixed]:
        index, from_conf = self.__cli.get(prefixed), False
    else:
        index, from_conf = self.__config.get(prefixed), True

    return self.__abspath(index, from_conf)
Retrieve the absolute path to an index according to prefix .
46,878
def get_path(self, key, rel_to_cwd=False, rel_to_conf=False):
    """Retrieve a path from the config, resolving it against the
    invocation directory or the configuration file directory, depending
    on whether it was passed through the command line or the
    configuration file.
    """
    if key in self.__cli:
        path, from_conf = self.__cli[key], False
    else:
        path, from_conf = self.__config.get(key), True

    if not isinstance(path, str):
        return None

    res = self.__abspath(path, from_conf)
    if rel_to_cwd:
        return os.path.relpath(res, self.__invoke_dir)
    if rel_to_conf:
        return os.path.relpath(res, self.__conf_dir)
    return self.__abspath(path, from_conf)
Retrieve a path from the config resolving it against the invokation directory or the configuration file directory depending on whether it was passed through the command - line or the configuration file .
46,879
def get_paths(self, key):
    """Same as ConfigParser.get_path for a list of paths."""
    if key in self.__cli:
        paths, from_conf = self.__cli[key] or [], False
    else:
        paths, from_conf = self.__config.get(key) or [], True

    resolved = [self.__abspath(path, from_conf)
                for path in flatten_list(paths)]
    # Drop entries that could not be resolved.
    return [path for path in resolved if path]
Same as ConfigParser . get_path for a list of paths .
46,880
def get_sources(self, prefix=''):
    """Retrieve a set of absolute paths to sources, according to prefix."""
    prefix = prefix.replace('-', '_')

    key = '%s_sources' % prefix
    if key in self.__cli:
        sources, from_conf = self.__cli.get(key), False
    else:
        sources, from_conf = self.__config.get(key), True
    if sources is None:
        return OrderedSet()
    sources = self.__resolve_patterns(sources, from_conf)

    key = '%s_source_filters' % prefix
    if key in self.__cli:
        filters, from_conf = self.__cli.get(key), False
    else:
        filters, from_conf = self.__config.get(key), True
    if filters is None:
        return sources

    # Remove filtered-out files from the resolved set.
    sources -= self.__resolve_patterns(filters, from_conf)
    return sources
Retrieve a set of absolute paths to sources according to prefix
46,881
def get_dependencies(self):
    """Retrieve the set of all dependencies for a given configuration."""
    all_deps = OrderedSet()
    strip = len('sources') * -1 - 1  # strip the trailing '_sources'

    for key, _ in list(self.__config.items()):
        if key in self.__cli:
            # Command-line values override the configuration file.
            continue
        if key.endswith('sources'):
            all_deps |= self.get_sources(key[:strip])

    for key, _ in list(self.__cli.items()):
        if key.endswith('sources'):
            all_deps |= self.get_sources(key[:strip])

    if self.conf_file is not None:
        all_deps.add(self.conf_file)

    all_deps.add(self.get_path("sitemap", rel_to_cwd=True))

    cwd = os.getcwd()
    return [os.path.relpath(fname, cwd) for fname in all_deps if fname]
Retrieve the set of all dependencies for a given configuration .
46,882
def dump(self, conf_file=None):
    """Dump the possibly updated config to a file.

    Args:
        conf_file: str, optional destination path; defaults to the file
            this configuration was loaded from, or 'hotdoc.json'.
    """
    if conf_file:
        conf_dir = os.path.dirname(conf_file)
        if not conf_dir:
            conf_dir = self.__invoke_dir
        elif not os.path.exists(conf_dir):
            os.makedirs(conf_dir)
    else:
        conf_dir = self.__conf_dir

    final_conf = {}
    # Start from config-file values; command-line values (handled
    # below) take precedence over them.
    for key, value in list(self.__config.items()):
        if key in self.__cli:
            continue
        final_conf[key] = value

    for key, value in list(self.__cli.items()):
        if key.endswith('index') or key in ['sitemap', 'output']:
            # Single-path keys: persist them relative to the dump dir.
            path = self.__abspath(value, from_conf=False)
            if path:
                relpath = os.path.relpath(path, conf_dir)
                final_conf[key] = relpath
        elif key.endswith('sources') or key.endswith('source_filters'):
            # Path-list keys: relativize each resolvable entry.
            new_list = []
            for path in value:
                path = self.__abspath(path, from_conf=False)
                if path:
                    relpath = os.path.relpath(path, conf_dir)
                    new_list.append(relpath)
            final_conf[key] = new_list
        elif key not in ['command', 'output_conf_file']:
            # Transient keys are never persisted.
            final_conf[key] = value

    with open(conf_file or self.conf_file or 'hotdoc.json', 'w') as _:
        _.write(json.dumps(final_conf, sort_keys=True, indent=4))
Dump the possibly updated config to a file .
46,883
def _update_submodules(repo_dir):
    """Update submodules in a repo."""
    for cmd in ("git submodule init",
                "git submodule update --recursive"):
        subprocess.check_call(cmd, cwd=repo_dir, shell=True)
update submodules in a repo
46,884
def require_clean_submodules(repo_root, submodules):
    """Check on git submodules before distutils can do anything.

    Since distutils cannot be trusted to update the tree after
    everything has been set in motion, this is not a distutils command.
    """
    # These invocations should never trigger a submodule check.
    skip_args = ('-h', '--help', '--help-commands', 'clean', 'submodule')
    if any(arg in sys.argv for arg in skip_args):
        return

    status = _check_submodule_status(repo_root, submodules)
    if status == "missing":
        print("checking out submodules for the first time")
        _update_submodules(repo_root)
    elif status == "unclean":
        print(UNCLEAN_SUBMODULES_MSG)
Check on git submodules before distutils can do anything Since distutils cannot be trusted to update the tree after everything has been set in motion this is not a distutils command .
46,885
def symlink(source, link_name):
    """Create a symlink, with support for Windows.

    Does nothing when link_name already is a symlink pointing at source.
    """
    if os.path.islink(link_name) and os.readlink(link_name) == source:
        return

    os_symlink = getattr(os, "symlink", None)
    if callable(os_symlink):
        os_symlink(source, link_name)
        return

    # No os.symlink: fall back to the win32 API.
    import ctypes
    csl = ctypes.windll.kernel32.CreateSymbolicLinkW
    csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
    csl.restype = ctypes.c_ubyte
    flags = 1 if os.path.isdir(source) else 0
    if csl(link_name, source, flags) == 0:
        raise ctypes.WinError()
Method to allow creating symlinks on Windows
46,886
def pkgconfig(*packages, **kw):
    """Query pkg-config for library compile and linking options.

    Return configuration in distutils Extension format.

    Keyword Args:
        config: dict, an existing configuration to extend in place.
        optional: str, an extra flag passed verbatim to every
            pkg-config invocation.
            NOTE(review): when left at the default '', an empty argument
            is passed to pkg-config — confirm this is tolerated on all
            supported platforms.

    Returns:
        dict: distutils Extension keywords (include_dirs, library_dirs,
        libraries, extra_compile_args, extra_link_args) mapped to lists
        of flag values.
    """
    config = kw.setdefault('config', {})
    optional_args = kw.setdefault('optional', '')
    # distutils keyword -> (pkg-config option, prefix length to strip
    # from each returned token, e.g. 2 strips '-I' / '-L' / '-l').
    flag_map = {'include_dirs': ['--cflags-only-I', 2],
                'library_dirs': ['--libs-only-L', 2],
                'libraries': ['--libs-only-l', 2],
                'extra_compile_args': ['--cflags-only-other', 0],
                'extra_link_args': ['--libs-only-other', 0],
                }
    for package in packages:
        for distutils_key, (pkg_option, n) in flag_map.items():
            items = subprocess.check_output(
                ['pkg-config', optional_args, pkg_option,
                 package]).decode('utf8').split()
            config.setdefault(distutils_key, []).extend(
                [i[n:] for i in items])
    return config
Query pkg - config for library compile and linking options . Return configuration in distutils Extension format .
46,887
def register_error_code(code, exception_type, domain='core'):
    """Register a new error code."""
    entry = (exception_type, domain)
    Logger._error_code_to_exception[code] = entry
    Logger._domain_codes[domain].add(code)
Register a new error code
46,888
def register_warning_code(code, exception_type, domain='core'):
    """Register a new warning code."""
    entry = (exception_type, domain)
    Logger._warning_code_to_exception[code] = entry
    Logger._domain_codes[domain].add(code)
Register a new warning code
46,889
def _log(code, message, level, domain):
    """Call this to add an entry in the journal."""
    record = LogEntry(level, domain, code, message)
    Logger.journal.append(record)
    # Only print entries loud enough for the current verbosity.
    if not Logger.silent and level >= Logger._verbosity:
        _print_entry(record)
Call this to add an entry in the journal
46,890
def error(code, message, **kwargs):
    """Call this to raise an exception and have it stored in the journal."""
    assert code in Logger._error_code_to_exception
    exception_class, domain = Logger._error_code_to_exception[code]
    exception = exception_class(message, **kwargs)
    Logger._log(code, exception.message, ERROR, domain)
    raise exception
Call this to raise an exception and have it stored in the journal
46,891
def warn(code, message, **kwargs):
    """Call this to store a warning in the journal.

    Raises the associated exception when fatal warnings are enabled.
    """
    if code in Logger._ignored_codes:
        return

    assert code in Logger._warning_code_to_exception
    exception_class, domain = Logger._warning_code_to_exception[code]
    if domain in Logger._ignored_domains:
        return

    level = ERROR if Logger.fatal_warnings else WARNING
    exception = exception_class(message, **kwargs)
    Logger._log(code, exception.message, level, domain)

    if Logger.fatal_warnings:
        raise exception
Call this to store a warning in the journal .
46,892
def debug(message, domain):
    """Log debugging information."""
    if domain not in Logger._ignored_domains:
        Logger._log(None, message, DEBUG, domain)
Log debugging information
46,893
def info(message, domain):
    """Log simple info."""
    if domain not in Logger._ignored_domains:
        Logger._log(None, message, INFO, domain)
Log simple info
46,894
def get_issues():
    """Get actual issues in the journal."""
    # Issues are the warning-or-worse entries.
    return [entry for entry in Logger.journal if entry.level >= WARNING]
Get actual issues in the journal .
46,895
def reset():
    """Reset Logger to its initial state."""
    Logger.journal = []
    Logger.fatal_warnings = False
    # Clear all filtering state.
    Logger._ignored_codes = set()
    Logger._ignored_domains = set()
    # Restore default verbosity and checkpoint position.
    Logger._verbosity = 2
    Logger._last_checkpoint = 0
Resets Logger to its initial state
46,896
def walk(self, action, user_data=None):
    """Walk the hierarchy, applying action to each filename.

    Args:
        action: callable, invoked as
            action(source_file, subpages, level, user_data).
        user_data: arbitrary data forwarded to each action call.
    """
    # Visit the root index at level 0, then recurse from level 1.
    action(self.index_file, self.__root, 0, user_data)
    self.__do_walk(self.__root, 1, action, user_data)
Walk the hierarchy applying action to each filename .
46,897
def parse(self, filename):
    """Parse a sitemap file.

    Args:
        filename: str, path to the sitemap file.

    Returns:
        Sitemap: the parsed hierarchy of pages.
    """
    with io.open(filename, 'r', encoding='utf-8') as _:
        lines = _.readlines()

    all_source_files = set()
    source_map = {}  # source file -> (lineno, column)

    lineno = 0
    root = None
    index = None
    cur_level = -1
    parent_queue = []  # stack of ancestor pages of the current line

    for line in lines:
        try:
            level, line = dedent(line)
            if line.startswith('#'):
                # Comment line, skip it.
                lineno += 1
                continue
            elif line.startswith('\\#'):
                # Escaped hash: unescape and keep the line.
                line = line[1:]
        except IndentError as exc:
            error('bad-indent', 'Invalid indentation',
                  filename=filename, lineno=lineno, column=exc.column)

        if not line:
            lineno += 1
            continue

        source_file = dequote(line)
        if not source_file:
            lineno += 1
            continue

        if source_file in all_source_files:
            error('sitemap-duplicate', 'Filename listed twice',
                  filename=filename, lineno=lineno,
                  column=level * 8 + 1)

        all_source_files.add(source_file)
        source_map[source_file] = (lineno, level * 8 + 1)

        page = OrderedDict()

        # Only a single level-0 entry (the root) is allowed.
        if root is not None and level == 0:
            error('sitemap-error', 'Sitemaps only support one root',
                  filename=filename, lineno=lineno, column=0)

        if root is None:
            root = page
            index = source_file
        else:
            # Pop back up to the parent for this indentation level.
            lvl_diff = cur_level - level
            while lvl_diff >= 0:
                parent_queue.pop()
                lvl_diff -= 1
            parent_queue[-1][source_file] = page

        parent_queue.append(page)
        cur_level = level
        lineno += 1

    return Sitemap(root, filename, index, source_map)
Parse a sitemap file .
46,898
def parse_comment(self, comment, filename, lineno, endlineno,
                  include_paths=None, stripped=False):
    """Return a Comment given a string.

    Args:
        comment: str, the raw gtk-doc comment text.
        filename: str, the file the comment was found in.
        lineno: int, the line the comment starts at.
        endlineno: int, the line the comment ends at.
        include_paths: unused here; kept for API compatibility.
        stripped: bool, whether the comment delimiters have already
            been stripped.

    Returns:
        Comment: the parsed comment, or None on invalid input.
    """
    if not stripped and not self.__validate_c_comment(comment.strip()):
        return None

    title_offset = 0
    column_offset = 0

    raw_comment = comment
    if not stripped:
        try:
            # Count back to the previous newline to find the column
            # the comment block starts at.
            while comment[column_offset * -1 - 1] != '\n':
                column_offset += 1
        except IndexError:
            # The comment starts at the very beginning of the text.
            column_offset = 0
        comment, title_offset = self.__strip_comment(comment)

    title_and_params, description = \
        self.__extract_titles_params_and_description(comment)
    try:
        block_name, parameters, annotations, is_section = \
            self.__parse_title_and_parameters(filename, title_and_params)
    except HotdocSourceException as _:
        warn('gtk-doc-bad-syntax', message=_.message,
             filename=filename, lineno=lineno + title_offset)
        return None

    # Rebase each parameter's position onto the original source file.
    params_offset = 0
    for param in parameters:
        param.filename = filename
        param.lineno = lineno
        param_offset = param.line_offset
        param.line_offset = title_offset + params_offset + 1
        params_offset += param_offset
        param.col_offset = column_offset

    if not block_name:
        return None

    description_offset = 0
    meta = {}
    tags = []
    if description is not None:
        n_lines = len(comment.split('\n'))
        description_offset = (title_offset + n_lines -
                              len(description.split('\n')))
        meta['description'], tags = \
            self.__parse_description_and_tags(description)

    actual_parameters = OrderedDict({})
    for param in parameters:
        if is_section:
            # Section comments carry metadata, not real parameters.
            cleaned_up_name = param.name.lower().replace('_', '-')
            if cleaned_up_name in ['symbols', 'private-symbols',
                                   'auto-sort', 'sources']:
                meta.update(self.__parse_yaml_comment(param, filename))
                if cleaned_up_name == 'sources':
                    # Resolve source paths relative to the comment file.
                    sources_paths = [os.path.abspath(os.path.join(
                        os.path.dirname(filename), path))
                        for path in meta[cleaned_up_name]]
                    meta[cleaned_up_name] = sources_paths
            else:
                meta[param.name] = param.description
        else:
            actual_parameters[param.name] = param

    annotations = {annotation.name: annotation
                   for annotation in annotations}
    tags = {tag.name.lower(): tag for tag in tags}

    block = Comment(name=block_name, filename=filename, lineno=lineno,
                    endlineno=endlineno, annotations=annotations,
                    params=actual_parameters, tags=tags,
                    raw_comment=raw_comment, meta=meta,
                    toplevel=is_section)
    block.line_offset = description_offset
    block.col_offset = column_offset

    return block
Returns a Comment given a string
46,899
def comment_to_ast(self, comment, link_resolver):
    """Given a gtk-doc comment string, returns an opaque PyCapsule
    containing the document root.

    Args:
        comment: the Comment to parse.
        link_resolver: used to resolve links in the comment body.

    Returns:
        The cmark AST root for the comment's description.
    """
    assert comment is not None

    text = comment.description

    if (self.remove_xml_tags or comment.filename in
            self.gdbus_codegen_sources):
        text = re.sub('<.*?>', '', text)

    if self.escape_html:
        # FIX: cgi.escape was removed in Python 3.8;
        # html.escape(quote=False) is the documented drop-in
        # replacement with identical output.
        import html
        text = html.escape(text, quote=False)

    ast, diagnostics = cmark.gtkdoc_to_ast(text, link_resolver)
    for diag in diagnostics:
        if (comment.filename and comment.filename not in
                self.gdbus_codegen_sources):
            # Rebase the diagnostic position onto the source file.
            column = diag.column + comment.col_offset
            if diag.lineno == 0:
                column += comment.initial_col_offset
            lines = text.split('\n')
            line = lines[diag.lineno]
            i = 0
            while line[i] == ' ':
                i += 1
            column += i - 1
            if diag.lineno > 0 and any([c != ' ' for c in
                                        lines[diag.lineno - 1]]):
                column += 1
            lineno = -1
            if comment.lineno != -1:
                lineno = (comment.lineno - 1 + comment.line_offset +
                          diag.lineno)
            warn(diag.code, message=diag.message,
                 filename=comment.filename, lineno=lineno,
                 column=column)
    return ast
Given a gtk - doc comment string returns an opaque PyCapsule containing the document root .