idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
21,000
def report_invalid_syntax(self):
    """Report the in-flight exception as an E901 syntax error.

    Extracts a (line, column) position from the exception args when
    available, defaulting to (1, 0).
    """
    exc_type, exc = sys.exc_info()[:2]
    if len(exc.args) > 1:
        position = exc.args[1]
        if len(position) > 2:
            # SyntaxError args carry (filename, lineno, offset, text)
            position = position[1:3]
    else:
        position = (1, 0)
    message = 'E901 %s: %s' % (exc_type.__name__, exc.args[0])
    self.report_error(position[0], position[1] or 0,
                      message, self.report_invalid_syntax)
Check if the syntax is valid .
21,001
def run_check(self, check, argument_names):
    """Run a check plugin, feeding it the named attributes of self."""
    values = [getattr(self, attr) for attr in argument_names]
    return check(*values)
Run a check plugin .
21,002
def init_checker_state(self, name, argument_names):
    """Give a stateful checker plugin a per-name scratch dictionary."""
    wants_state = 'checker_state' in argument_names
    if wants_state:
        self.checker_state = self._checker_states.setdefault(name, {})
Prepare custom state for the specific checker plugin .
21,003
def check_ast(self):
    """Build the file's AST and run every registered AST checker."""
    source = ''.join(self.lines)
    try:
        tree = compile(source, '', 'exec', PyCF_ONLY_AST)
    except (ValueError, SyntaxError, TypeError):
        return self.report_invalid_syntax()
    for _name, checker_cls, _args in self._ast_checks:
        plugin = checker_cls(tree, self.filename)
        for lineno, offset, text, check in plugin.run():
            # honor "# noqa" on the offending line
            suppressed = self.lines and noqa(self.lines[lineno - 1])
            if not suppressed:
                self.report_error(lineno, offset, text, check)
Build the file's AST and run all AST checks.
21,004
def generate_tokens(self):
    """Tokenize the file, run physical-line checks, and yield tokens.

    Stops once tokens pass the known total line count; reports E902 on
    a prior I/O error and E901 on tokenizer failure.
    """
    if self._io_error:
        self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
    stream = tokenize.generate_tokens(self.readline)
    try:
        for token in stream:
            start_row = token[2][0]
            if start_row > self.total_lines:
                return
            # token[4] is the physical line the token came from
            self.noqa = token[4] and noqa(token[4])
            self.maybe_check_physical(token)
            yield token
    except (SyntaxError, tokenize.TokenError):
        self.report_invalid_syntax()
Tokenize the file, run physical line checks, and yield tokens.
21,005
def get_count(self, prefix=''):
    """Return the total count of errors and warnings.

    When *prefix* is given, only counters whose code starts with it
    are included.
    """
    # Generator expression avoids building an intermediate list.
    return sum(self.counters[key]
               for key in self.messages if key.startswith(prefix))
Return the total count of errors and warnings .
21,006
# Emit the deferred per-file results: sort the queued errors, print each
# through self._fmt, optionally echo the offending source line with a
# caret marker and (with _show_pep8) the check's documentation, flush
# stdout, and return the error count for this file.
# NOTE(review): kept byte-identical — the exact whitespace inside the
# doc-indent string literal is ambiguous in this flattened listing.
def get_file_results ( self ) : self . _deferred_print . sort ( ) for line_number , offset , code , text , doc in self . _deferred_print : print ( self . _fmt % { 'path' : self . filename , 'row' : self . line_offset + line_number , 'col' : offset + 1 , 'code' : code , 'text' : text , } ) if self . _show_source : if line_number > len ( self . lines ) : line = '' else : line = self . lines [ line_number - 1 ] print ( line . rstrip ( ) ) print ( re . sub ( r'\S' , ' ' , line [ : offset ] ) + '^' ) if self . _show_pep8 and doc : print ( ' ' + doc . strip ( ) ) sys . stdout . flush ( ) return self . file_errors
Print the result and return the overall count for this file .
21,007
def init_report(self, reporter=None):
    """Initialize and return the report instance.

    Uses *reporter* when given, else the configured options.reporter.
    """
    chosen = reporter or self.options.reporter
    self.options.report = chosen(self.options)
    return self.options.report
Initialize the report instance .
21,008
def check_files(self, paths=None):
    """Run all checks on the given paths (default: self.paths).

    Directories are walked via input_dir; KeyboardInterrupt stops the
    run cleanly. Returns the report.
    """
    if paths is None:
        paths = self.paths
    report = self.options.report
    runner = self.runner
    report.start()
    try:
        for path in paths:
            if os.path.isdir(path):
                self.input_dir(path)
                continue
            if not self.excluded(path):
                runner(path)
    except KeyboardInterrupt:
        print('... stopped')
    report.stop()
    return report
Run all checks on the paths .
21,009
def input_dir(self, dirname):
    """Check all matching files under *dirname*, honoring exclusions."""
    dirname = dirname.rstrip('/')
    if self.excluded(dirname):
        return 0
    counters = self.options.report.counters
    verbose = self.options.verbose
    patterns = self.options.filename
    runner = self.runner
    for root, dirs, files in os.walk(dirname):
        if verbose:
            print('directory ' + root)
        counters['directories'] += 1
        # prune excluded subdirectories in place so os.walk skips them
        # (iterate a sorted copy so removal is safe)
        for subdir in sorted(dirs):
            if self.excluded(subdir, root):
                dirs.remove(subdir)
        for entry in sorted(files):
            matches = (filename_match(entry, patterns) and
                       not self.excluded(entry, root))
            if matches:
                runner(os.path.join(root, entry))
Check all files in this directory and all subdirectories .
21,010
def ignore_code(self, code):
    """Decide whether *code* should be ignored.

    Short codes that prefix any selected code are never ignored;
    otherwise a code is ignored when it matches the ignore list and
    not the select list.
    """
    is_short = len(code) < 4
    if is_short and any(sel.startswith(code) for sel in self.options.select):
        return False
    matches_ignore = code.startswith(self.options.ignore)
    matches_select = code.startswith(self.options.select)
    return matches_ignore and not matches_select
Check if the error code should be ignored .
21,011
def get_checks(self, argument_name):
    """Return all checks for this category, sorted by check name.

    A check is kept when at least one of its codes is not ignored.
    """
    selected = []
    for check, (codes, args) in _checks[argument_name].items():
        wanted = any(not (code and self.ignore_code(code))
                     for code in codes)
        if wanted:
            selected.append((check.__name__, check, args))
    return sorted(selected)
Get all the checks for this category .
21,012
def loads(data, use_datetime=0):
    """data -> (unmarshalled data, method name).

    The method name is None when the data is not a method call.
    """
    parser, unmarshaller = getparser(use_datetime=use_datetime)
    parser.feed(data)
    parser.close()
    return unmarshaller.close(), unmarshaller.getmethodname()
data -> (unmarshalled data, method name)
21,013
def transform_import(self, node, results):
    """Replace a plain import of an old module with its replacements.

    The old name becomes a comma-separated list of new module names.
    """
    import_mod = results.get("module")
    pref = import_mod.prefix
    replacements = MAPPING[import_mod.value]
    # interleave Name/Comma for all but the last replacement
    pieces = []
    for entry in replacements[:-1]:
        pieces.append(Name(entry[0], prefix=pref))
        pieces.append(Comma())
    pieces.append(Name(replacements[-1][0], prefix=pref))
    import_mod.replace(pieces)
Transform for the basic import case . Replaces the old import name with a comma separated list of its replacements .
21,014
# Transform "from mod import member" imports of moved module elements.
# A single member is renamed via MAPPING (or flagged as invalid); a
# multi-member import is regrouped into one FromImport per replacement
# module, preserving import-as names and indentation.
# NOTE(review): this flattened listing is truncated mid-statement at the
# end of the first line and continues with "cannot_convert(...)" below —
# kept byte-identical rather than guessing at the missing tokens.
def transform_member ( self , node , results ) : mod_member = results . get ( "mod_member" ) pref = mod_member . prefix member = results . get ( "member" ) if member : if isinstance ( member , list ) : member = member [ 0 ] new_name = None for change in MAPPING [ mod_member . value ] : if member . value in change [ 1 ] : new_name = change [ 0 ] break if new_name : mod_member . replace ( Name ( new_name , prefix = pref ) ) else : self . cannot_convert ( node , "This is an invalid module element" ) else : modules = [ ] mod_dict = { } members = results [ "members" ] for member in members : if member . type == syms . import_as_name : as_name = member . children [ 2 ] . value member_name = member . children [ 0 ] . value else : member_name = member . value as_name = None if member_name != u"," : for change in MAPPING [ mod_member . value ] : if member_name in change [ 1 ] : if change [ 0 ] not in mod_dict : modules . append ( change [ 0 ] ) mod_dict . setdefault ( change [ 0 ] , [ ] ) . append ( member ) new_nodes = [ ] indentation = find_indentation ( node ) first = True def handle_name ( name , prefix ) : if name . type == syms . import_as_name : kids = [ Name ( name . children [ 0 ] . value , prefix = prefix ) , name . children [ 1 ] . clone ( ) , name . children [ 2 ] . clone ( ) ] return [ Node ( syms . import_as_name , kids ) ] return [ Name ( name . value , prefix = prefix ) ] for module in modules : elts = mod_dict [ module ] names = [ ] for elt in elts [ : - 1 ] : names . extend ( handle_name ( elt , pref ) ) names . append ( Comma ( ) ) names . extend ( handle_name ( elts [ - 1 ] , pref ) ) new = FromImport ( module , names ) if not first or node . parent . prefix . endswith ( indentation ) : new . prefix = indentation new_nodes . append ( new ) first = False if new_nodes : nodes = [ ] for new_node in new_nodes [ : - 1 ] : nodes . extend ( [ new_node , Newline ( ) ] ) nodes . append ( new_nodes [ - 1 ] ) node . replace ( nodes ) else : self . 
cannot_convert ( node , "All module elements are invalid" )
Transform for imports of specific module elements . Replaces the module to be imported from with the appropriate new module .
21,015
def transform_dot(self, node, results):
    """Rename a moved module for calls to module members in code."""
    module_dot = results.get("bare_with_attr")
    member = results.get("member")
    if isinstance(member, list):
        member = member[0]
    new_name = None
    for change in MAPPING[module_dot.value]:
        if member.value in change[1]:
            new_name = change[0]
            break
    if new_name is None:
        self.cannot_convert(node, "This is an invalid module element")
    else:
        module_dot.replace(Name(new_name, prefix=module_dot.prefix))
Transform for calls to module members in code .
21,016
def call_only_once(func):
    """Decorator: the wrapped function runs at most once.

    Later invocations are no-ops that return None. The wrapper exposes
    a `_called` flag.
    """
    def new_func(*args, **kwargs):
        if new_func._called:
            return None
        new_func._called = True
        return func(*args, **kwargs)
    new_func._called = False
    return new_func
To be used as a decorator
21,017
def get_all_fix_names(fixer_pkg, remove_prefix=True):
    """Return a sorted list of all available fix names in *fixer_pkg*.

    Fixes are files named ``fix_*.py`` in the package directory; the
    ``fix_`` prefix is stripped unless *remove_prefix* is false.
    """
    pkg = __import__(fixer_pkg, [], [], ["*"])
    fixer_dir = os.path.dirname(pkg.__file__)
    names = []
    for entry in sorted(os.listdir(fixer_dir)):
        if not (entry.startswith("fix_") and entry.endswith(".py")):
            continue
        stem = entry[:-3]  # drop ".py"
        names.append(stem[4:] if remove_prefix else stem)
    return names
Return a sorted list of all available fix names in the given package .
21,018
def _get_head_types(pat):
    """Return the set of pattern types that *pat* can match first.

    Raises _EveryNode when the pattern could match any node type.
    """
    if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
        if pat.type is None:
            raise _EveryNode
        return set([pat.type])
    if isinstance(pat, pytree.NegatedPattern):
        if pat.content:
            return _get_head_types(pat.content)
        # a negated empty pattern matches anything
        raise _EveryNode
    if isinstance(pat, pytree.WildcardPattern):
        types = set()
        for alternative in pat.content:
            for sub_pattern in alternative:
                types.update(_get_head_types(sub_pattern))
        return types
    raise Exception("Oh no! I don't understand pattern %s" % (pat))
Accepts a pytree Pattern Node and returns a set of the pattern types which will match first .
21,019
# Inspect self.fixers: import each fixer module, derive its CamelCase
# class name from the module name (after stripping FILE_PREFIX),
# instantiate it, skip explicit-only fixers that weren't requested, and
# return (pre_order, post_order) fixer lists each sorted by run_order.
# Raises FixerError for a missing class or an illegal fixer.order.
def get_fixers ( self ) : pre_order_fixers = [ ] post_order_fixers = [ ] for fix_mod_path in self . fixers : mod = __import__ ( fix_mod_path , { } , { } , [ "*" ] ) fix_name = fix_mod_path . rsplit ( "." , 1 ) [ - 1 ] if fix_name . startswith ( self . FILE_PREFIX ) : fix_name = fix_name [ len ( self . FILE_PREFIX ) : ] parts = fix_name . split ( "_" ) class_name = self . CLASS_PREFIX + "" . join ( [ p . title ( ) for p in parts ] ) try : fix_class = getattr ( mod , class_name ) except AttributeError : raise FixerError ( "Can't find %s.%s" % ( fix_name , class_name ) ) fixer = fix_class ( self . options , self . fixer_log ) if fixer . explicit and self . explicit is not True and fix_mod_path not in self . explicit : self . log_message ( "Skipping implicit fixer: %s" , fix_name ) continue self . log_debug ( "Adding transformation: %s" , fix_name ) if fixer . order == "pre" : pre_order_fixers . append ( fixer ) elif fixer . order == "post" : post_order_fixers . append ( fixer ) else : raise FixerError ( "Illegal fixer order: %r" % fixer . order ) key_func = operator . attrgetter ( "run_order" ) pre_order_fixers . sort ( key = key_func ) post_order_fixers . sort ( key = key_func ) return ( pre_order_fixers , post_order_fixers )
Inspects the options to load the requested patterns and handlers .
21,020
def log_message(self, msg, *args):
    """Hook to log a message at INFO level, %-formatting when args given."""
    text = msg % args if args else msg
    self.logger.info(text)
Hook to log a message .
21,021
def refactor(self, items, write=False, doctests_only=False):
    """Refactor a list of files and directories.

    Directories are handled recursively via refactor_dir.
    """
    for target in items:
        handler = (self.refactor_dir if os.path.isdir(target)
                   else self.refactor_file)
        handler(target, write, doctests_only)
Refactor a list of files and directories .
21,022
def refactor_dir(self, dir_name, write=False, doctests_only=False):
    """Descend *dir_name* and refactor every Python file found.

    Hidden files and directories (leading ".") are skipped.
    """
    py_ext = os.extsep + "py"
    for dirpath, dirnames, filenames in os.walk(dir_name):
        self.log_debug("Descending into %s", dirpath)
        dirnames.sort()
        filenames.sort()
        for name in filenames:
            is_python = (not name.startswith(".") and
                         os.path.splitext(name)[1] == py_ext)
            if is_python:
                self.refactor_file(os.path.join(dirpath, name),
                                   write, doctests_only)
        # prune hidden directories in place so os.walk skips them
        dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")]
Descends down a directory and refactor every Python file found .
21,023
def _read_python_source(self, filename):
    """Do our best to decode a Python source file correctly.

    Returns (source_text, encoding), or (None, None) when the file
    cannot be opened.
    """
    try:
        raw = open(filename, "rb")
    except IOError as err:
        self.log_error("Can't open %s: %s", filename, err)
        return None, None
    try:
        # sniff the PEP 263 coding cookie / BOM
        encoding = tokenize.detect_encoding(raw.readline)[0]
    finally:
        raw.close()
    with _open_with_encoding(filename, "r", encoding=encoding) as f:
        return _from_system_newlines(f.read()), encoding
Do our best to decode a Python source file correctly .
21,024
def refactor_file(self, filename, write=False, doctests_only=False):
    """Refactor one file, either its doctests or the code itself."""
    input, encoding = self._read_python_source(filename)
    if input is None:
        return
    # silence certain parse errors caused by a missing trailing newline
    input += u"\n"
    if doctests_only:
        self.log_debug("Refactoring doctests in %s", filename)
        output = self.refactor_docstring(input, filename)
        if self.write_unchanged_files or output != input:
            self.processed_file(output, filename, input, write, encoding)
        else:
            self.log_debug("No doctest changes in %s", filename)
        return
    tree = self.refactor_string(input, filename)
    if self.write_unchanged_files or (tree and tree.was_changed):
        # strip the newline added above
        self.processed_file(unicode(tree)[:-1], filename,
                            write=write, encoding=encoding)
    else:
        self.log_debug("No changes in %s", filename)
Refactors a file .
21,025
def refactor_string(self, data, name):
    """Refactor a given input string; returns the tree or None on error."""
    features = _detect_future_features(data)
    if "print_function" in features:
        # print is a function here, so use the no-print-statement grammar
        self.driver.grammar = pygram.python_grammar_no_print_statement
    try:
        tree = self.driver.parse_string(data)
    except Exception as err:
        self.log_error("Can't parse %s: %s: %s",
                       name, err.__class__.__name__, err)
        return
    finally:
        self.driver.grammar = self.grammar
    tree.future_features = features
    self.log_debug("Refactoring %s", name)
    self.refactor_tree(tree, name)
    return tree
Refactor a given input string .
21,026
def traverse_by(self, fixers, traversal):
    """Apply the type-indexed *fixers* to each node of *traversal*.

    A successful transform replaces the node in place and the new node
    is offered to subsequent fixers.
    """
    if not fixers:
        return
    for node in traversal:
        for fixer in fixers[node.type]:
            match = fixer.match(node)
            if not match:
                continue
            replacement = fixer.transform(node, match)
            if replacement is not None:
                node.replace(replacement)
                node = replacement
Traverse an AST applying a set of fixers to each node .
21,027
def processed_file(self, new_text, filename, old_text=None, write=False,
                   encoding=None):
    """Record a refactored file, print its diff, and maybe write it."""
    self.files.append(filename)
    if old_text is None:
        old_text = self._read_python_source(filename)[0]
        if old_text is None:
            return
    equal = old_text == new_text
    self.print_output(old_text, new_text, filename, equal)
    if equal:
        self.log_debug("No changes to %s", filename)
        if not self.write_unchanged_files:
            return
    if write:
        self.write_file(new_text, filename, old_text, encoding)
    else:
        self.log_debug("Not writing changes to %s", filename)
Called when a file has been refactored and there may be changes .
21,028
def write_file(self, new_text, filename, old_text, encoding=None):
    """Write *new_text* to *filename*, logging failures instead of raising."""
    try:
        out = _open_with_encoding(filename, "w", encoding=encoding)
    except os.error as err:
        self.log_error("Can't create %s: %s", filename, err)
        return
    try:
        out.write(_to_system_newlines(new_text))
    except os.error as err:
        self.log_error("Can't write %s: %s", filename, err)
    finally:
        out.close()
    self.log_debug("Wrote changes to %s", filename)
    self.wrote = True
Writes a string to a file .
21,029
# Scan the input line by line, collecting consecutive PS1/PS2 doctest
# lines into a block; each completed block is handed to refactor_doctest
# and its result spliced into the output, while non-doctest lines pass
# through unchanged. Returns the rebuilt text. Kept byte-identical: the
# block/indent state machine is order-sensitive.
def refactor_docstring ( self , input , filename ) : result = [ ] block = None block_lineno = None indent = None lineno = 0 for line in input . splitlines ( True ) : lineno += 1 if line . lstrip ( ) . startswith ( self . PS1 ) : if block is not None : result . extend ( self . refactor_doctest ( block , block_lineno , indent , filename ) ) block_lineno = lineno block = [ line ] i = line . find ( self . PS1 ) indent = line [ : i ] elif ( indent is not None and ( line . startswith ( indent + self . PS2 ) or line == indent + self . PS2 . rstrip ( ) + u"\n" ) ) : block . append ( line ) else : if block is not None : result . extend ( self . refactor_doctest ( block , block_lineno , indent , filename ) ) block = None indent = None result . append ( line ) if block is not None : result . extend ( self . refactor_doctest ( block , block_lineno , indent , filename ) ) return u"" . join ( result )
Refactors a docstring looking for doctests .
21,030
def parse_block(self, block, lineno, indent):
    """Parse a doctest block into a tree with no future features."""
    tokens = self.wrap_toks(block, lineno, indent)
    tree = self.driver.parse_tokens(tokens)
    tree.future_features = frozenset()
    return tree
Parses a block into a tree .
21,031
def gen_lines(self, block, indent):
    """Generate prompt-stripped lines from a doctest block.

    After the block is exhausted, yields empty strings forever, as
    tokenize expects.
    """
    first_prompt = indent + self.PS1
    cont_prompt = indent + self.PS2
    expected = first_prompt
    for line in block:
        if line.startswith(expected):
            yield line[len(expected):]
        elif line == expected.rstrip() + u"\n":
            # blank continuation line
            yield u"\n"
        else:
            raise AssertionError("line=%r, prefix=%r" % (line, expected))
        expected = cont_prompt
    while True:
        yield ""
Generates lines as expected by tokenize from a list of lines .
21,032
def var_to_xml(val, name, trim_if_too_big=True, additional_in_xml='',
               evaluate_full_value=True):
    """Render a single variable as a pydevd ``<var .../>`` XML element."""
    type_name, type_qualifier, is_exception_on_eval, resolver, value = \
        get_variable_details(val, evaluate_full_value)
    try:
        name = quote(name, '/>_= ')
    except:
        # best effort: fall back to the unquoted name
        pass
    xml = '<var name="%s" type="%s" ' % (make_valid_xml_value(name),
                                         make_valid_xml_value(type_name))
    xml_qualifier = ('qualifier="%s"' % make_valid_xml_value(type_qualifier)
                     if type_qualifier else '')
    if value:
        # truncate huge representations so the wire payload stays bounded
        if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and trim_if_too_big:
            value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE]
            value += '...'
        xml_value = ' value="%s"' % (
            make_valid_xml_value(quote(value, '/>_= ')))
    else:
        xml_value = ''
    if is_exception_on_eval:
        xml_container = ' isErrorOnEval="True"'
    elif resolver is not None:
        xml_container = ' isContainer="True"'
    else:
        xml_container = ''
    return ''.join((xml, xml_qualifier, xml_value, xml_container,
                    additional_in_xml, ' />\n'))
single variable or dictionary to xml representation
21,033
# .exploitable - Determine the approximate exploitability rating.
# NOTE(review): Python 2 code (print statements) — kept byte-identical.
# Builds a winappdbg Crash from the debugger's last event and prints the
# isExploitable() status, matched rule and description.
def do ( self , arg ) : ".exploitable - Determine the approximate exploitability rating" from winappdbg import Crash event = self . debug . lastEvent crash = Crash ( event ) crash . fetch_extra_data ( event ) status , rule , description = crash . isExploitable ( ) print "-" * 79 print "Exploitability: %s" % status print "Matched rule: %s" % rule print "Description: %s" % description print "-" * 79
. exploitable - Determine the approximate exploitability rating
21,034
def _cleanup ( self ) : self . device = None self . doc = None self . parser = None self . resmgr = None self . interpreter = None
Frees lots of non - textual information such as the fonts and images and the objects that were needed to parse the PDF .
21,035
def get_app_name(app_name):
    """Resolve *app_name* through an app config class when possible.

    Returns the class's ``name`` when the dotted path locates a class,
    else returns *app_name* unchanged.
    """
    resolved = locate(app_name)
    if not inspect.isclass(resolved):
        return app_name
    return resolved.name
Returns a app name from new app config if is a class or the same app name if is not a class .
21,036
def has_role(user, roles):
    """Check whether *user* has any of the given roles.

    Superusers always pass. *roles* may be a single role (class or
    name) or a list of them; names are resolved via RolesManager.
    """
    if user and user.is_superuser:
        return True
    if not isinstance(roles, list):
        roles = [roles]
    normalized = []
    for role in roles:
        if not inspect.isclass(role):
            role = RolesManager.retrieve_role(role)
        normalized.append(role)
    user_roles = get_user_roles(user)
    return any([role in user_roles for role in normalized])
Check if a user has any of the given roles .
21,037
def has_permission(user, permission_name):
    """Check if a user has a given permission.

    Superusers implicitly hold every permission.
    """
    if user and user.is_superuser:
        return True
    available = available_perm_names(user)
    return permission_name in available
Check if a user has a given permission .
21,038
def has_object_permission(checker_name, user, obj):
    """Check whether *user* may act on *obj* via the named checker.

    Superusers always pass. The checker runs once per user role
    (with role=None for role-less users); any success grants access.
    """
    if user and user.is_superuser:
        return True
    checker = PermissionsManager.retrieve_checker(checker_name)
    user_roles = get_user_roles(user) or [None]
    return any([checker(role, user, obj) for role in user_roles])
Check if a user has permission to perform an action on an object .
21,039
def get_or_create_permission(codename, name=camel_or_snake_to_title):
    """Get or create a Permission on the user content type.

    *name* may be a callable (applied to *codename*) or a plain string.
    """
    user_ct = ContentType.objects.get_for_model(get_user_model())
    display = name(codename) if callable(name) else name
    return Permission.objects.get_or_create(
        content_type=user_ct, codename=codename,
        defaults={'name': display})
Get a Permission object from a permission name .
21,040
def get_user_roles(user):
    """Return the user's registered roles, sorted by role name."""
    if not user:
        return []
    registered = RolesManager.get_roles_names()
    roles = (RolesManager.retrieve_role(group.name)
             for group in user.groups.all()
             if group.name in registered)
    return sorted(roles, key=lambda r: r.get_name())
Get a list of a user's roles.
21,041
def clear_roles(user):
    """Remove every role from *user*; returns the roles that were removed."""
    removed = get_user_roles(user)
    for role in removed:
        role.remove_role_from_user(user)
    return removed
Remove all roles from a user .
21,042
def available_perm_status(user):
    """Map each role-scoped permission name to whether the user holds it."""
    granted = user.user_permissions.all()
    status = {}
    for role in get_user_roles(user):
        for perm_name in role.permission_names_list():
            status[perm_name] = get_permission(perm_name) in granted
    return status
Get a boolean map of the permissions available to a user, based on that user's roles.
21,043
def grant_permission(user, permission_name):
    """Grant *permission_name* to *user*.

    Raises RolePermissionScopeException when no role of the user
    includes the permission.
    """
    for role in get_user_roles(user):
        if permission_name in role.permission_names_list():
            user.user_permissions.add(get_permission(permission_name))
            return
    raise RolePermissionScopeException(
        "This permission isn't in the scope of "
        "any of this user's roles.")
Grant a user a specified permission .
21,044
def revoke_permission(user, permission_name):
    """Revoke *permission_name* from *user*.

    Raises RolePermissionScopeException when no role of the user
    includes the permission.
    """
    for role in get_user_roles(user):
        if permission_name in role.permission_names_list():
            user.user_permissions.remove(get_permission(permission_name))
            return
    raise RolePermissionScopeException(
        "This permission isn't in the scope of "
        "any of this user's roles.")
Revoke a specified permission from a user .
21,045
def model_saved(sender, instance, created, raw, using, **kwargs):
    """Signal receiver: fire 'created'/'updated' hook events on save."""
    opts = get_opts(instance)
    model_label = '.'.join([opts.app_label, opts.object_name])
    distill_model_event(instance, model_label,
                        'created' if created else 'updated')
Automatically triggers created and updated actions .
21,046
def model_deleted(sender, instance, using, **kwargs):
    """Signal receiver: fire 'deleted' hook events on delete."""
    opts = get_opts(instance)
    model_label = '.'.join([opts.app_label, opts.object_name])
    distill_model_event(instance, model_label, 'deleted')
Automatically triggers deleted actions .
21,047
def raw_custom_event(sender, event_name, payload, user,
                     send_hook_meta=True, instance=None, **kwargs):
    """Deliver *payload* to every hook the user registered for the event.

    With *send_hook_meta*, the payload is wrapped with hook metadata.
    """
    HookModel = get_hook_model()
    for hook in HookModel.objects.filter(user=user, event=event_name):
        if send_hook_meta:
            delivered = {'hook': hook.dict(), 'data': payload}
        else:
            delivered = payload
        hook.deliver_hook(instance, payload_override=delivered)
Give a full payload
21,048
def clean(self):
    """Reject hook events that are not declared in HOOK_EVENTS."""
    event = self.event
    if event not in HOOK_EVENTS.keys():
        raise ValidationError(
            "Invalid hook event {evt}.".format(evt=event))
Validation for events .
21,049
def get_module(path):
    """Import and return the function at dotted *path*.

    A modified duplicate of Django's built-in backend retriever;
    raises ImportError with a descriptive message on failure.
    """
    try:
        from importlib import import_module
    except ImportError as e:
        # ancient Django fallback
        from django.utils.importlib import import_module
    try:
        mod_name, func_name = path.rsplit('.', 1)
        mod = import_module(mod_name)
    except ImportError as e:
        raise ImportError(
            'Error importing alert function {0}: "{1}"'.format(mod_name, e))
    try:
        func = getattr(mod, func_name)
    except AttributeError:
        raise ImportError(
            ('Module "{0}" does not define a "{1}" function').format(
                mod_name, func_name))
    return func
A modified duplicate from Django s built in backend retriever .
21,050
def get_hook_model():
    """Return the custom Hook model from settings, else the default Hook."""
    from rest_hooks.models import Hook
    custom = getattr(settings, 'HOOK_CUSTOM_MODEL', None)
    if custom:
        return get_module(settings.HOOK_CUSTOM_MODEL)
    return Hook
Returns the Custom Hook model if defined in settings otherwise the default Hook model .
21,051
def find_and_fire_hook(event_name, instance, user_override=None):
    """Look up the hooks registered for *event_name* and deliver to each.

    The owning user is taken from *user_override*, the instance's
    ``user`` attribute, or the instance itself (when it is a User);
    ``user_override=False`` disables user filtering entirely.
    """
    try:
        from django.contrib.auth import get_user_model
        User = get_user_model()
    except ImportError:
        from django.contrib.auth.models import User
    from rest_hooks.models import HOOK_EVENTS
    if not event_name in HOOK_EVENTS.keys():
        raise Exception(
            '"{}" does not exist in `settings.HOOK_EVENTS`.'.format(event_name))
    filters = {'event': event_name}
    if user_override is not False:
        if user_override:
            filters['user'] = user_override
        elif hasattr(instance, 'user'):
            filters['user'] = instance.user
        elif isinstance(instance, User):
            filters['user'] = instance
        else:
            raise Exception(
                '{} has no `user` property. REST Hooks needs this.'.format(
                    repr(instance)))
    HookModel = get_hook_model()
    for hook in HookModel.objects.filter(**filters):
        hook.deliver_hook(instance)
Look up Hooks that apply
21,052
def _get_plot_data(data, ndim=None):
    """Extract plottable coordinates from a PHATE op / AnnData / array.

    When *ndim* is given and the data has fewer dimensions, a PHATE
    operator is re-transformed; other inputs raise ValueError.
    """
    out = data
    if isinstance(data, PHATE):
        out = data.transform()
    else:
        try:
            if isinstance(data, anndata.AnnData):
                try:
                    out = data.obsm['X_phate']
                except KeyError:
                    raise RuntimeError(
                        "data.obsm['X_phate'] not found. "
                        "Please run `sc.tl.phate(adata)` before plotting.")
        except NameError:
            # anndata not installed; treat data as raw coordinates
            pass
    if ndim is not None and out[0].shape[0] < ndim:
        if not isinstance(data, PHATE):
            raise ValueError(
                "Expected at least {}-dimensional data, got {}".format(
                    ndim, out[0].shape[0]))
        data.set_params(n_components=ndim)
        out = data.transform()
    return out
Get plot data out of an input object
21,053
def scatter(x, y, z=None, c=None, cmap=None, s=None, discrete=None,
            ax=None, legend=None, figsize=None, xticks=False, yticks=False,
            zticks=False, xticklabels=True, yticklabels=True,
            zticklabels=True, label_prefix="PHATE", xlabel=None, ylabel=None,
            zlabel=None, title=None, legend_title="", legend_loc='best',
            filename=None, dpi=None, **plot_kwargs):
    """Create a scatter plot.

    Deprecated thin wrapper: warns and forwards everything to
    ``scprep.plot.scatter``.
    """
    warnings.warn("`phate.plot.scatter` is deprecated. "
                  "Use `scprep.plot.scatter` instead.", FutureWarning)
    return scprep.plot.scatter(
        x=x, y=y, z=z, c=c, cmap=cmap, s=s, discrete=discrete, ax=ax,
        legend=legend, figsize=figsize, xticks=xticks, yticks=yticks,
        zticks=zticks, xticklabels=xticklabels, yticklabels=yticklabels,
        zticklabels=zticklabels, label_prefix=label_prefix, xlabel=xlabel,
        ylabel=ylabel, zlabel=zlabel, title=title,
        legend_title=legend_title, legend_loc=legend_loc,
        filename=filename, dpi=dpi, **plot_kwargs)
Create a scatter plot
21,054
def scatter2d(data, **kwargs):
    """Create a 2D scatter plot.

    Deprecated: warns and forwards to ``scprep.plot.scatter2d``.
    """
    warnings.warn("`phate.plot.scatter2d` is deprecated. "
                  "Use `scprep.plot.scatter2d` instead.", FutureWarning)
    coords = _get_plot_data(data, ndim=2)
    return scprep.plot.scatter2d(coords, **kwargs)
Create a 2D scatter plot
21,055
def rotate_scatter3d(data, filename=None, elev=30, rotation_speed=30,
                     fps=10, ax=None, figsize=None, dpi=None,
                     ipython_html="jshtml", **kwargs):
    """Create a rotating 3D scatter plot.

    Deprecated: warns and forwards to ``scprep.plot.rotate_scatter3d``.
    """
    warnings.warn("`phate.plot.rotate_scatter3d` is deprecated. "
                  "Use `scprep.plot.rotate_scatter3d` instead.",
                  FutureWarning)
    return scprep.plot.rotate_scatter3d(
        data, filename=filename, elev=elev, rotation_speed=rotation_speed,
        fps=fps, ax=ax, figsize=figsize, dpi=dpi,
        ipython_html=ipython_html, **kwargs)
Create a rotating 3D scatter plot
21,056
def kmeans(phate_op, k=8, random_state=None):
    """Run KMeans clustering on the PHATE diffusion potential.

    Raises NotFittedError when the PHATE operator has no graph yet.
    """
    if phate_op.graph is None:
        raise exceptions.NotFittedError(
            "This PHATE instance is not fitted yet. Call "
            "'fit' with appropriate arguments before "
            "using this method.")
    potential = phate_op.calculate_potential()
    if isinstance(phate_op.graph, graphtools.graphs.LandmarkGraph):
        # landmark graphs store the potential on landmarks only
        potential = phate_op.graph.interpolate(potential)
    return cluster.KMeans(k, random_state=random_state).fit_predict(potential)
KMeans on the PHATE potential
21,057
def cmdscale_fast(D, ndim):
    """Fast classic MDS via randomized SVD on the double-centered
    squared distance matrix."""
    tasklogger.log_debug("Performing classic MDS on {} of shape {}...".format(
        type(D).__name__, D.shape))
    centered = D ** 2
    centered = centered - centered.mean(axis=0)[None, :]
    centered = centered - centered.mean(axis=1)[:, None]
    pca = PCA(n_components=ndim, svd_solver='randomized')
    return pca.fit_transform(centered)
Fast CMDS using random SVD
21,058
def embed_MDS(X, ndim=2, how='metric', distance_metric='euclidean',
              n_jobs=1, seed=None, verbose=0):
    """Perform classic, metric, or non-metric MDS on *X*.

    Parameters
    ----------
    X : array-like input data
    ndim : int, embedding dimension
    how : 'classic', 'metric' or 'nonmetric'
    distance_metric : metric name passed to scipy.spatial.distance.pdist
    n_jobs, seed, verbose : forwarded to sklearn smacof

    Returns
    -------
    Y : array of shape (n_samples, ndim)
    """
    if how not in ['classic', 'metric', 'nonmetric']:
        raise ValueError("Allowable 'how' values for MDS: 'classic', "
                         "'metric', or 'nonmetric'. "
                         "'{}' was passed.".format(how))
    X_dist = squareform(pdist(X, distance_metric))
    # classic MDS always runs first and seeds the iterative variants
    Y = cmdscale_fast(X_dist, ndim)
    if how in ['metric', 'nonmetric']:
        tasklogger.log_debug("Performing metric MDS on "
                             "{} of shape {}...".format(type(X_dist),
                                                        X_dist.shape))
        Y, _ = smacof(X_dist, n_components=ndim, metric=True, max_iter=3000,
                      eps=1e-6, random_state=seed, n_jobs=n_jobs,
                      n_init=1, init=Y, verbose=verbose)
    if how == 'nonmetric':
        tasklogger.log_debug("Performing non-metric MDS on "
                             "{} of shape {}...".format(type(X_dist),
                                                        X_dist.shape))
        # BUG FIX: non-metric MDS must run smacof with metric=False;
        # the original passed metric=True, duplicating the metric pass.
        Y, _ = smacof(X_dist, n_components=ndim, metric=False, max_iter=3000,
                      eps=1e-6, random_state=seed, n_jobs=n_jobs,
                      n_init=1, init=Y, verbose=verbose)
    return Y
Performs classic metric and non - metric MDS
21,059
def compute_von_neumann_entropy(data, t_max=100):
    """Determine the von Neumann entropy of *data* at matrix powers 1..t_max.

    The user should select a value of t around the knee of the returned
    entropy curve.

    Parameters
    ----------
    data : 2D array-like
    t_max : int, number of matrix powers to evaluate

    Returns
    -------
    numpy.ndarray of shape (t_max,)
    """
    _, eigenvalues, _ = svd(data)
    entropy = []
    eigenvalues_t = np.copy(eigenvalues)
    for _ in range(t_max):
        prob = eigenvalues_t / np.sum(eigenvalues_t)
        # eps guards against log(0) for vanished spectrum entries
        prob = prob + np.finfo(float).eps
        entropy.append(-np.sum(prob * np.log(prob)))
        eigenvalues_t = eigenvalues_t * eigenvalues
    # single conversion; the original wrapped the list in np.array twice
    return np.array(entropy)
Determines the Von Neumann entropy of data at varying matrix powers . The user should select a value of t around the knee of the entropy curve .
21,060
def check_positive(**params):
    """Check that every named parameter is a positive number."""
    for name, value in params.items():
        is_positive_number = isinstance(value, numbers.Number) and value > 0
        if not is_positive_number:
            raise ValueError(
                "Expected {} > 0, got {}".format(name, value))
Check that parameters are positive as expected
21,061
def check_int(**params):
    """Check that every named parameter is an integer."""
    for name, value in params.items():
        if not isinstance(value, numbers.Integral):
            raise ValueError(
                "Expected {} integer, got {}".format(name, value))
Check that parameters are integers as expected
21,062
def check_if_not(x, *checks, **params):
    """Run *checks* only on parameters that differ from *x*.

    A parameter is skipped when it equals *x* by identity or equality.
    """
    for name, value in params.items():
        if value is not x and value != x:
            for check in checks:
                check(**{name: value})
Run checks only if parameters are not equal to a specified value
21,063
def check_in(choices, **params):
    """Check that each named parameter is one of *choices*."""
    for name, value in params.items():
        if value not in choices:
            raise ValueError(
                "{} value {} not recognized. Choose from {}".format(
                    name, value, choices))
Checks parameters are in a list of allowed parameters
21,064
def check_between(v_min, v_max, **params):
    """Check that each named parameter lies within [v_min, v_max]."""
    for name, value in params.items():
        out_of_range = value < v_min or value > v_max
        if out_of_range:
            raise ValueError("Expected {} between {} and {}, "
                             "got {}".format(name, v_min, v_max, value))
Checks parameters are in a specified range
21,065
def matrix_is_equivalent(X, Y):
    """Check matrix equivalence across numpy, scipy and pandas types."""
    if X is Y:
        return True
    same_kind = isinstance(X, Y.__class__) and X.shape == Y.shape
    return same_kind and np.sum((X != Y).sum()) == 0
Checks matrix equivalence with numpy scipy and pandas
21,066
def diff_op(self):
    """The diffusion operator calculated from the data.

    Uses the landmark operator for landmark graphs; densifies sparse
    results before returning.

    Raises
    ------
    NotFittedError : if `fit` has not been called yet.
    """
    if self.graph is None:
        raise NotFittedError("This PHATE instance is not fitted yet. Call "
                             "'fit' with appropriate arguments before "
                             "using this method.")
    if isinstance(self.graph, graphtools.graphs.LandmarkGraph):
        op = self.graph.landmark_op
    else:
        op = self.graph.diff_op
    if sparse.issparse(op):
        # Callers expect a dense ndarray.
        op = op.toarray()
    return op
The diffusion operator calculated from the data
21,067
def _check_params(self):
    """Check PHATE parameters for validity.

    Raises ValueError (via the utils checkers) when any parameter is out
    of range or not a recognized choice.
    """
    utils.check_positive(n_components=self.n_components, k=self.knn)
    utils.check_int(n_components=self.n_components, k=self.knn,
                    n_jobs=self.n_jobs)
    # gamma must span [-1, 1]: calculate_potential explicitly handles
    # gamma == -1 (and negative gamma generally), so a (0, 1) bound would
    # make those branches unreachable.
    utils.check_between(-1, 1, gamma=self.gamma)
    # decay may be None (binary kNN kernel); check only when set.
    utils.check_if_not(None, utils.check_positive, a=self.decay)
    utils.check_if_not(None, utils.check_positive, utils.check_int,
                       n_landmark=self.n_landmark, n_pca=self.n_pca)
    # t may be 'auto' (selected from the VNE knee); check only when numeric.
    utils.check_if_not('auto', utils.check_positive, utils.check_int,
                       t=self.t)
    if not callable(self.knn_dist):
        utils.check_in(['euclidean', 'precomputed', 'cosine', 'correlation',
                        'cityblock', 'l1', 'l2', 'manhattan', 'braycurtis',
                        'canberra', 'chebyshev', 'dice', 'hamming', 'jaccard',
                        'kulsinski', 'mahalanobis', 'matching', 'minkowski',
                        'rogerstanimoto', 'russellrao', 'seuclidean',
                        'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule',
                        'precomputed_affinity', 'precomputed_distance'],
                       knn_dist=self.knn_dist)
    if not callable(self.mds_dist):
        utils.check_in(['euclidean', 'cosine', 'correlation', 'braycurtis',
                        'canberra', 'chebyshev', 'cityblock', 'dice',
                        'hamming', 'jaccard', 'kulsinski', 'mahalanobis',
                        'matching', 'minkowski', 'rogerstanimoto',
                        'russellrao', 'seuclidean', 'sokalmichener',
                        'sokalsneath', 'sqeuclidean', 'yule'],
                       mds_dist=self.mds_dist)
    utils.check_in(['classic', 'metric', 'nonmetric'], mds=self.mds)
Check PHATE parameters
21,068
def fit(self, X):
    """Compute the graph and diffusion operator for the input data.

    Parameters
    ----------
    X : array-like
        Input data, or a precomputed distance/affinity matrix
        (as determined by `_parse_input`).

    Returns
    -------
    self : the fitted PHATE estimator.
    """
    # _parse_input normalizes X and reports whether it is precomputed and
    # whether an existing graph should be updated in place.
    X, n_pca, precomputed, update_graph = self._parse_input(X)
    if precomputed is None:
        tasklogger.log_info(
            "Running PHATE on {} cells and {} genes.".format(
                X.shape[0], X.shape[1]))
    else:
        tasklogger.log_info(
            "Running PHATE on precomputed {} matrix with {} cells.".format(
                precomputed, X.shape[0]))
    # Landmarking only pays off when there are more cells than landmarks.
    if self.n_landmark is None or X.shape[0] <= self.n_landmark:
        n_landmark = None
    else:
        n_landmark = self.n_landmark
    if self.graph is not None and update_graph:
        # Reuse the existing graph where possible instead of rebuilding.
        self._update_graph(X, precomputed, n_pca, n_landmark)
    self.X = X
    if self.graph is None:
        tasklogger.log_start("graph and diffusion operator")
        self.graph = graphtools.Graph(
            X, n_pca=n_pca, n_landmark=n_landmark, distance=self.knn_dist,
            precomputed=precomputed, knn=self.knn, decay=self.decay,
            thresh=1e-4, n_jobs=self.n_jobs, verbose=self.verbose,
            random_state=self.random_state, **(self.kwargs))
        tasklogger.log_complete("graph and diffusion operator")
    # Access the property to trigger computation of the diffusion operator.
    self.diff_op
    return self
Computes the diffusion operator
21,069
def transform(self, X=None, t_max=100, plot_optimal_t=False, ax=None):
    """Compute the position of the cells in the embedding space.

    Parameters
    ----------
    X : array-like, optional
        New data; only supported on landmark-free, non-precomputed graphs
        via out-of-sample extension.
    t_max : int, optional (default: 100)
        Maximum t considered when t is selected automatically.
    plot_optimal_t : bool, optional (default: False)
        If True, plot the Von Neumann entropy used to select t.
    ax : matplotlib axis, optional
        Axis for the optional plot.

    Returns
    -------
    embedding : array-like, the low-dimensional embedding.

    Raises
    ------
    NotFittedError : if `fit` has not been called.
    ValueError : if transforming new data on a precomputed graph.
    """
    if self.graph is None:
        raise NotFittedError("This PHATE instance is not fitted yet. Call "
                             "'fit' with appropriate arguments before "
                             "using this method.")
    elif X is not None and not utils.matrix_is_equivalent(X, self.X):
        # Warn: PHATE cannot be partially refit; new data is only extended
        # into the existing embedding, not used to refit the operator.
        warnings.warn("Pre-fit PHATE cannot be used to transform a "
                      "new data matrix. Please fit PHATE to the new"
                      " data by running 'fit' with the new data.",
                      RuntimeWarning)
        if isinstance(self.graph,
                      graphtools.graphs.TraditionalGraph) and \
                self.graph.precomputed is not None:
            raise ValueError("Cannot transform additional data using a "
                             "precomputed distance matrix.")
        else:
            # Out-of-sample extension: interpolate new points from the
            # fitted embedding via graph transitions.
            transitions = self.graph.extend_to_data(X)
            return self.graph.interpolate(self.embedding, transitions)
    else:
        diff_potential = self.calculate_potential(
            t_max=t_max, plot_optimal_t=plot_optimal_t, ax=ax)
        if self.embedding is None:
            # Embed the diffusion potential with the configured MDS variant.
            tasklogger.log_start("{} MDS".format(self.mds))
            self.embedding = mds.embed_MDS(
                diff_potential, ndim=self.n_components, how=self.mds,
                distance_metric=self.mds_dist, n_jobs=self.n_jobs,
                seed=self.random_state, verbose=max(self.verbose - 1, 0))
            tasklogger.log_complete("{} MDS".format(self.mds))
        if isinstance(self.graph, graphtools.graphs.LandmarkGraph):
            # Landmark graphs embed only landmarks; extend back to all cells.
            tasklogger.log_debug("Extending to original data...")
            return self.graph.interpolate(self.embedding)
        else:
            return self.embedding
Computes the position of the cells in the embedding space
21,070
def fit_transform(self, X, **kwargs):
    """Compute the diffusion operator and embed the cells.

    Equivalent to calling `fit(X)` followed by `transform(**kwargs)`.

    Returns
    -------
    embedding : array-like, the low-dimensional embedding.
    """
    tasklogger.log_start('PHATE')
    self.fit(X)
    result = self.transform(**kwargs)
    tasklogger.log_complete('PHATE')
    return result
Computes the diffusion operator and the position of the cells in the embedding space
21,071
def calculate_potential(self, t=None, t_max=100, plot_optimal_t=False,
                        ax=None):
    """Calculate the diffusion potential from the diffusion operator.

    Parameters
    ----------
    t : int or 'auto', optional
        Power of the diffusion operator; defaults to `self.t`, and when
        'auto' is selected from the Von Neumann entropy knee.
    t_max : int, optional (default: 100)
        Maximum t considered for automatic selection.
    plot_optimal_t : bool, optional (default: False)
        If True, plot the entropy curve used to pick t.
    ax : matplotlib axis, optional
        Axis for the optional plot.

    Returns
    -------
    diff_potential : the (cached) diffusion potential matrix.
    """
    if t is None:
        t = self.t
    # The potential is cached; recompute only on first access.
    if self.diff_potential is None:
        if t == 'auto':
            t = self.optimal_t(t_max=t_max, plot=plot_optimal_t, ax=ax)
        else:
            t = self.t
        tasklogger.log_start("diffusion potential")
        diff_op_t = np.linalg.matrix_power(self.diff_op, t)
        if self.gamma == 1:
            # Log potential; small offset avoids log(0).
            diff_op_t = diff_op_t + 1e-7
            self.diff_potential = -1 * np.log(diff_op_t)
        elif self.gamma == -1:
            # Identity potential: use the powered operator directly.
            self.diff_potential = diff_op_t
        else:
            # Interpolated power potential between the two extremes.
            c = (1 - self.gamma) / 2
            self.diff_potential = ((diff_op_t) ** c) / c
        tasklogger.log_complete("diffusion potential")
    elif plot_optimal_t:
        # Potential already cached; still honor the plotting request.
        self.optimal_t(t_max=t_max, plot=plot_optimal_t, ax=ax)
    return self.diff_potential
Calculates the diffusion potential
21,072
def von_neumann_entropy(self, t_max=100):
    """Calculate the Von Neumann entropy of the diffusion operator.

    Returns
    -------
    (t, entropy) : tuple of arrays
        Timesteps 0..t_max-1 and the entropy at each corresponding power.
    """
    timesteps = np.arange(t_max)
    entropy = vne.compute_von_neumann_entropy(self.diff_op, t_max=t_max)
    return timesteps, entropy
Calculate Von Neumann Entropy
21,073
def optimal_t(self, t_max=100, plot=False, ax=None):
    """Find the optimal value of t as the knee of the VNE curve.

    Parameters
    ----------
    t_max : int, optional (default: 100)
        Maximum t to consider.
    plot : bool, optional (default: False)
        If True, plot the entropy curve and the chosen knee point.
    ax : matplotlib axis, optional
        Axis to draw on; a new figure is created when omitted.

    Returns
    -------
    t_opt : int, the selected t.
    """
    tasklogger.log_start("optimal t")
    timesteps, entropy = self.von_neumann_entropy(t_max=t_max)
    t_opt = vne.find_knee_point(y=entropy, x=timesteps)
    tasklogger.log_info("Automatically selected t = {}".format(t_opt))
    tasklogger.log_complete("optimal t")
    if plot:
        # Only call plt.show() for figures we created ourselves.
        show = ax is None
        if ax is None:
            fig, ax = plt.subplots()
        ax.plot(timesteps, entropy)
        ax.scatter(t_opt, entropy[timesteps == t_opt],
                   marker='*', c='k', s=50)
        ax.set_xlabel("t")
        ax.set_ylabel("Von Neumann Entropy")
        ax.set_title("Optimal t = {}".format(t_opt))
        if show:
            plt.show()
    return t_opt
Find the optimal value of t
21,074
def a2bits(chars: str) -> str:
    """Convert a string to its bits representation as a string of 0's and 1's.

    Each character contributes exactly 8 bits; a leading sentinel bit keeps
    leading zeros intact and is stripped from the result.
    """
    acc = 1  # sentinel high bit, removed by the [3:] slice below
    for ch in chars:
        acc = (acc << 8) + ord(ch)
    return bin(acc)[3:]
Converts a string to its bits representation as a string of 0 s and 1 s .
21,075
def a2bits_list(chars: str, encoding: str = "UTF-8") -> List[str]:
    """Convert a string to its bits representation as a list of 0's and 1's.

    Each character is rendered as a zero-padded binary string whose width
    is determined by the ENCODINGS table for the given encoding.
    """
    width = ENCODINGS[encoding]
    return [format(ord(ch), "b").zfill(width) for ch in chars]
Convert a string to its bits representation as a list of 0 s and 1 s .
21,076
def bs(s: int) -> str:
    """Convert an int to its bits representation as a string of 0's and 1's."""
    if s <= 1:
        return str(s)
    # Recurse on the high bits, then append the lowest bit.
    return bs(s >> 1) + str(s & 1)
Converts an int to its bits representation as a string of 0 s and 1 s .
21,077
def n_at_a_time(items: List[int], n: int,
                fillvalue: str) -> Iterator[Tuple[Union[int, str]]]:
    """Return an iterator which groups n items at a time.

    Any final partial tuple is padded with `fillvalue`.
    """
    # n references to the same iterator advance in lockstep, yielding
    # consecutive n-sized chunks.
    chunks = [iter(items)] * n
    return itertools.zip_longest(*chunks, fillvalue=fillvalue)
Returns an iterator which groups n items at a time . Any final partial tuple will be padded with the fillvalue
21,078
def open_image(fname_or_instance: Union[str, IO[bytes]]):
    """Open an image and return it.

    Accepts either an already-open PIL Image (returned unchanged) or a
    filename / binary file object to open.
    """
    if not isinstance(fname_or_instance, Image.Image):
        fname_or_instance = Image.open(fname_or_instance)
    return fname_or_instance
Opens an image and returns it; if given an already-open Image instance, returns it unchanged.
21,079
def log_gen() -> Iterator[int]:
    """Logarithmic generator.

    Yields 1..10 stepping by 1, then by 10 up to 100, by 100 up to 1000,
    and so on: the step is always the current order of magnitude.
    """
    value = 1
    while True:
        yield int(value)
        # Step size = 10 ** floor(log10(value)), never less than 1.
        step = max(1, math.pow(10, int(math.log10(value))))
        value = value + int(step)
Logarithmic generator .
21,080
def deprecated(replacement=None, version=None):
    """A decorator which can be used to mark functions as deprecated.

    Parameters
    ----------
    replacement : callable, optional
        Called with the same args as the decorated function; also named in
        the warning message.
    version : str, optional
        Version in which the decorated function will be removed.
    """
    def outer(oldfun):
        def inner(*args, **kwargs):
            # Build the message with explicit '; ' separators. The original
            # concatenated "...deprecatedwill be removed in version N;"
            # (missing separator, stray trailing semicolon).
            msg = "%s is deprecated" % oldfun.__name__
            if version is not None:
                msg += "; will be removed in version %s" % version
            if replacement is not None:
                msg += "; use %s instead" % (replacement)
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            if callable(replacement):
                return replacement(*args, **kwargs)
            else:
                return oldfun(*args, **kwargs)
        return inner
    return outer
A decorator which can be used to mark functions as deprecated . replacement is a callable that will be called with the same args as the decorated function . >>> import pytest >>>
21,081
def get_classname ( o ) : if inspect . isclass ( o ) : target = o elif callable ( o ) : target = o else : target = o . __class__ try : return target . __qualname__ except AttributeError : return target . __name__
Returns the classname of an object or a class.
21,082
def _handler ( self , sender , setting , value , ** kwargs ) : if setting . startswith ( self . prefix ) : self . _set_attr ( setting , value )
handler for setting_changed signal .
21,083
def get_version(model_instance, version):
    """Try to load from the database one object with a specific version.

    Looks up the model's version field name and queries by (pk, version).
    """
    field = get_version_fieldname(model_instance)
    lookup = {'pk': model_instance.pk, field: version}
    return model_instance.__class__.objects.get(**lookup)
Try to load from the database one object with a specific version.
21,084
def conflict(request, target=None, template_name='409.html'):
    """409 (Conflict) error handler.

    Renders the user's 409 template when available, otherwise a minimal
    inline fallback, with both the conflicting object and the currently
    saved copy in context.
    """
    try:
        template = loader.get_template(template_name)
    except TemplateDoesNotExist:
        # No project-supplied template: fall back to a minimal inline one.
        template = Template(
            '<h1>Conflict</h1>'
            '<p>The request was unsuccessful due to a conflict. '
            'The object changed during the transaction.</p>')
    try:
        saved = target.__class__._default_manager.get(pk=target.pk)
    except target.__class__.DoesNotExist:
        # The row was deleted concurrently.
        saved = None
    context = {'target': target,
               'saved': saved,
               'request_path': request.path}
    return ConflictResponse(template.render(context))
409 error handler .
21,085
def add_arguments(self, parser):
    """Entry point for subclassed commands to add custom arguments.

    Registers the list/drop/create subcommands plus database and trigger
    filters.
    """
    subparsers = parser.add_subparsers(help='sub-command help',
                                       dest='command')
    add_parser = partial(_add_subparser, subparsers, parser)
    for name, help_text in (('list', "list concurrency triggers"),
                            ('drop', "drop concurrency triggers"),
                            ('create', "create concurrency triggers")):
        add_parser(name, help=help_text)
    parser.add_argument('-d', '--database',
                        action='store', dest='database', default=None,
                        help='limit to this database')
    parser.add_argument('-t', '--trigger',
                        action='store', dest='trigger', default=None,
                        help='limit to this trigger name')
Entry point for subclassed commands to add custom arguments .
21,086
def action_checkbox(self, obj):
    """A list_display column containing a checkbox widget.

    When concurrent actions are checked, the checkbox value carries both
    the pk and the object's revision.
    """
    if not self.check_concurrent_action:
        return super(ConcurrencyActionMixin, self).action_checkbox(obj)
    value = "%s,%s" % (obj.pk, get_revision_of_object(obj))
    return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME,
                                   force_text(value))
A list_display column containing a checkbox widget .
21,087
def _management_form(self):
    """Returns the ManagementForm instance for this FormSet.

    For bound formsets the form is rebuilt from submitted data and
    validated; otherwise it is seeded with the formset counts and the
    (pk, revision) pairs of the initial forms.
    """
    if self.is_bound:
        form = ConcurrentManagementForm(self.data, auto_id=self.auto_id,
                                        prefix=self.prefix)
        if not form.is_valid():
            raise ValidationError(
                'ManagementForm data is missing or has been tampered with')
    else:
        form = ConcurrentManagementForm(
            auto_id=self.auto_id,
            prefix=self.prefix,
            initial={TOTAL_FORM_COUNT: self.total_form_count(),
                     INITIAL_FORM_COUNT: self.initial_form_count(),
                     MAX_NUM_FORM_COUNT: self.max_num},
            # Pair each initial form's instance with its concurrency
            # revision so edits can be conflict-checked on submit.
            versions=[(form.instance.pk, get_revision_of_object(form.instance))
                      for form in self.initial_forms])
    return form
Returns the ManagementForm instance for this FormSet .
21,088
def cipherprefs(self):
    """A list of preferred symmetric algorithms specified in this signature,
    if any. Otherwise, an empty list.
    """
    if 'PreferredSymmetricAlgorithms' not in self._signature.subpackets:
        return []
    packet = next(iter(
        self._signature.subpackets['h_PreferredSymmetricAlgorithms']))
    return packet.flags
A list of preferred symmetric algorithms specified in this signature if any . Otherwise an empty list .
21,089
def compprefs(self):
    """A list of preferred compression algorithms specified in this
    signature, if any. Otherwise, an empty list.
    """
    if 'PreferredCompressionAlgorithms' not in self._signature.subpackets:
        return []
    packet = next(iter(
        self._signature.subpackets['h_PreferredCompressionAlgorithms']))
    return packet.flags
A list of preferred compression algorithms specified in this signature if any . Otherwise an empty list .
21,090
def exportable(self):
    """False if this signature is marked as being not exportable.
    Otherwise, True.
    """
    if 'ExportableCertification' not in self._signature.subpackets:
        # Absent subpacket means exportable by default.
        return True
    packet = next(iter(
        self._signature.subpackets['ExportableCertification']))
    return bool(packet)
False if this signature is marked as being not exportable . Otherwise True .
21,091
def features(self):
    """A set of implementation features specified in this signature,
    if any. Otherwise, an empty set.
    """
    if 'Features' not in self._signature.subpackets:
        return set()
    packet = next(iter(self._signature.subpackets['Features']))
    return packet.flags
A set of implementation features specified in this signature if any . Otherwise an empty set .
21,092
def hashprefs(self):
    """A list of preferred hash algorithms specified in this signature,
    if any. Otherwise, an empty list.
    """
    if 'PreferredHashAlgorithms' not in self._signature.subpackets:
        return []
    packet = next(iter(
        self._signature.subpackets['h_PreferredHashAlgorithms']))
    return packet.flags
A list of preferred hash algorithms specified in this signature if any . Otherwise an empty list .
21,093
def is_expired(self):
    """True if the signature has an expiration date and is expired.
    Otherwise, False.
    """
    expiry = self.expires_at
    # No expiration, or an expiration equal to the creation time, means
    # the signature does not expire.
    if expiry is None or expiry == self.created:
        return False
    return expiry < datetime.utcnow()
True if the signature has an expiration date and is expired . Otherwise False
21,094
def keyserver(self):
    """The preferred key server specified in this signature, if any.
    Otherwise, an empty str.
    """
    if 'PreferredKeyServer' not in self._signature.subpackets:
        return ''
    packet = next(iter(self._signature.subpackets['h_PreferredKeyServer']))
    return packet.uri
The preferred key server specified in this signature if any . Otherwise an empty str .
21,095
def notation(self):
    """A dict of notation data in this signature, if any.
    Otherwise, an empty dict.
    """
    return {nd.name: nd.value
            for nd in self._signature.subpackets['NotationData']}
A dict of notation data in this signature if any . Otherwise an empty dict .
21,096
def policy_uri(self):
    """The policy URI specified in this signature, if any.
    Otherwise, an empty str.
    """
    if 'Policy' not in self._signature.subpackets:
        return ''
    packet = next(iter(self._signature.subpackets['Policy']))
    return packet.uri
The policy URI specified in this signature if any . Otherwise an empty str .
21,097
def revocable(self):
    """False if this signature is marked as being not revocable.
    Otherwise, True.
    """
    if 'Revocable' not in self._signature.subpackets:
        # Absent subpacket means revocable by default.
        return True
    packet = next(iter(self._signature.subpackets['Revocable']))
    return bool(packet)
False if this signature is marked as being not revocable . Otherwise True .
21,098
def hashdata(self, subject):
    """Serialize the data to be hashed for this signature.

    All signatures are formed by producing a hash over the signature data
    and then using the resulting hash in the signature algorithm. This
    assembles that data for the signature's type per the OpenPGP rules:
    document bytes, key/UID material with type-octet prefixes, then the
    hashed signature context and trailer.
    """
    _data = bytearray()
    if isinstance(subject, six.string_types):
        # charmap maps code points 0-255 straight to bytes.
        subject = subject.encode('charmap')
    if self.type == SignatureType.BinaryDocument:
        if isinstance(subject, (SKEData, IntegrityProtectedSKEData)):
            _data += subject.__bytearray__()
        else:
            _data += bytearray(subject)
    if self.type == SignatureType.CanonicalDocument:
        # Canonical text: normalize all line endings to CRLF.
        _data += re.subn(br'\r?\n', b'\r\n', subject)[0]
    if self.type in {SignatureType.Generic_Cert, SignatureType.Persona_Cert,
                     SignatureType.Casual_Cert, SignatureType.Positive_Cert,
                     SignatureType.CertRevocation,
                     SignatureType.Subkey_Binding,
                     SignatureType.PrimaryKey_Binding}:
        # Hash the primary key material first (0x99 + 2-octet length).
        _s = b''
        if isinstance(subject, PGPUID):
            _s = subject._parent.hashdata
        elif isinstance(subject, PGPKey) and not subject.is_primary:
            _s = subject._parent.hashdata
        elif isinstance(subject, PGPKey) and subject.is_primary:
            _s = subject.hashdata
        if len(_s) > 0:
            _data += b'\x99' + self.int_to_bytes(len(_s), 2) + _s
    if self.type in {SignatureType.Subkey_Binding,
                     SignatureType.PrimaryKey_Binding}:
        # Binding signatures also hash the subkey material.
        if subject.is_primary:
            _s = subject.subkeys[self.signer].hashdata
        else:
            _s = subject.hashdata
        _data += b'\x99' + self.int_to_bytes(len(_s), 2) + _s
    if self.type in {SignatureType.KeyRevocation,
                     SignatureType.SubkeyRevocation,
                     SignatureType.DirectlyOnKey}:
        if self.type == SignatureType.SubkeyRevocation:
            # Subkey revocation hashes the parent key before the subkey.
            _s = subject.parent.hashdata
            _data += b'\x99' + self.int_to_bytes(len(_s), 2) + _s
        _s = subject.hashdata
        _data += b'\x99' + self.int_to_bytes(len(_s), 2) + _s
    if self.type in {SignatureType.Generic_Cert, SignatureType.Persona_Cert,
                     SignatureType.Casual_Cert, SignatureType.Positive_Cert,
                     SignatureType.CertRevocation}:
        # Certifications hash the UID (0xb4) or user attribute (0xd1)
        # with a 4-octet length prefix.
        _s = subject.hashdata
        if subject.is_uid:
            _data += b'\xb4'
        else:
            _data += b'\xd1'
        _data += self.int_to_bytes(len(_s), 4) + _s
    if 0 in list(self._signature.signature):
        # NOTE(review): presumably a zero placeholder means the hashed
        # length has not been computed yet — confirm against callers.
        self._signature.update_hlen()
    # Hashed signature context: version, type, algorithms, subpackets.
    hcontext = bytearray()
    hcontext.append(self._signature.header.version
                    if not self.embedded
                    else self._signature._sig.header.version)
    hcontext.append(self.type)
    hcontext.append(self.key_algorithm)
    hcontext.append(self.hash_algorithm)
    hcontext += self._signature.subpackets.__hashbytearray__()
    hlen = len(hcontext)
    _data += hcontext
    # Trailer: version 4 marker, 0xff, and the 4-octet hashed length.
    _data += b'\x04\xff'
    _data += self.int_to_bytes(hlen, 4)
    return bytes(_data)
All signatures are formed by producing a hash over the signature data and then using the resulting hash in the signature algorithm .
21,099
def image(self):
    """If this is a User Attribute, this will be the stored image.
    Otherwise, None.
    """
    if isinstance(self._uid, UserAttribute):
        return self._uid.image.image
    return None
If this is a User Attribute this will be the stored image . If this is not a User Attribute this will be None .