idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
19,100
def _sum_string_fn(into, tmp_val, gender, items=None):
    """
    Make the in-words representation of a single thousand-order chunk.

    Spells out ``tmp_val % 1000`` in front of the already accumulated text
    ``into`` and returns ``(accumulated_text, tmp_val // 1000)``.
    ``items`` is the (one, two, five) plural-form triple for this order;
    ``gender`` is a 1-based index into the ONES gender forms.
    """
    if items is None:
        items = (u"", u"", u"")
    one_item, two_items, five_items = items
    check_positive(tmp_val)
    if tmp_val == 0:
        return into, tmp_val
    words = []
    rest = tmp_val % 1000
    tmp_val = tmp_val // 1000
    if rest == 0:
        # Chunk is empty: only seed the order suffix if nothing was spelled yet.
        if into == u"":
            into = u"%s " % five_items
        return into, tmp_val
    end_word = five_items
    words.append(HUNDREDS[rest // 100])
    rest = rest % 100
    rest1 = rest // 10
    # NOTE: and/or idiom kept on purpose — it falls back to TENS[rest1]
    # whenever TENS[rest] is an empty string.
    tens = rest1 == 1 and TENS[rest] or TENS[rest1]
    words.append(tens)
    if rest1 != 1:
        # Not in the teens: spell the last digit and pick its plural form.
        amount = rest % 10
        end_word = choose_plural(amount, items)
        words.append(ONES[amount][gender - 1])
    words.append(end_word)
    words.append(into)
    non_empty = filter(lambda w: len(w) > 0, words)
    return u" ".join(non_empty).strip(), tmp_val
Make in - words representation of single order
19,101
def check_length(value, length):
    """Raise ValueError unless ``len(value)`` equals ``length``."""
    actual = len(value)
    if actual != length:
        raise ValueError("length must be %d, not %d" % (length, actual))
Checks length of value
19,102
def check_positive(value, strict=False):
    """Raise ValueError if ``value`` is negative, or non-positive when
    ``strict`` is true."""
    if strict:
        if value <= 0:
            raise ValueError("Value must be positive, not %s" % str(value))
    elif value < 0:
        raise ValueError("Value must be positive or zero, not %s" % str(value))
Checks if variable is positive
19,103
def detranslify(text):
    """Detranslify (reverse-transliterate) Russian text.

    On any failure, returns the module's ``default_value`` template
    formatted with the error and the original value instead of raising.
    """
    try:
        return translit.detranslify(text)
    except Exception as err:
        # Best-effort API: surface the failure through the template.
        return default_value % {'error': err, 'value': text}
Detranslify russian text
19,104
def apply(diff, recs, strict=True):
    """Transform ``recs`` with the patch ``diff``.

    May fail if the records do not match those expected in the patch.
    Returns the patched records in canonical sort order.
    """
    index_columns = diff['_index']
    # Deep-copy so the caller's records are never mutated in place.
    indexed = records.index(copy.deepcopy(list(recs)), index_columns)
    _add_records(indexed, diff['added'], index_columns, strict=strict)
    _remove_records(indexed, diff['removed'], index_columns, strict=strict)
    _update_records(indexed, diff['changed'], strict=strict)
    return records.sort(indexed.values())
Transform the records with the patch . May fail if the records do not match those expected in the patch .
19,105
def load(istream, strict=True):
    """Deserialize a patch object from ``istream``.

    Raises InvalidPatchError for malformed JSON or (when ``strict``)
    for JSON that does not match the patch schema.
    """
    try:
        patch_obj = json.load(istream)
        if strict:
            jsonschema.validate(patch_obj, SCHEMA)
    except ValueError:
        raise InvalidPatchError('patch is not valid JSON')
    except jsonschema.exceptions.ValidationError as e:
        raise InvalidPatchError(e.message)
    return patch_obj
Deserialize a patch object .
19,106
def save(diff, stream=sys.stdout, compact=False):
    """Serialize a patch object to ``stream`` as JSON.

    Keys are always sorted; ``compact`` drops the pretty-printing indent.
    """
    dump_kwargs = {'sort_keys': True}
    if not compact:
        dump_kwargs['indent'] = 2
    json.dump(diff, stream, **dump_kwargs)
Serialize a patch object .
19,107
def create(from_records, to_records, index_columns, ignore_columns=None):
    """Diff two sets of records using ``index_columns`` as the primary
    key for both datasets; columns in ``ignore_columns`` are dropped
    before comparison."""
    from_indexed = records.index(from_records, index_columns)
    to_indexed = records.index(to_records, index_columns)
    if ignore_columns is not None:
        from_indexed = records.filter_ignored(from_indexed, ignore_columns)
        to_indexed = records.filter_ignored(to_indexed, ignore_columns)
    return create_indexed(from_indexed, to_indexed, index_columns)
Diff two sets of records using the index columns as the primary key for both datasets .
19,108
def _compare_rows ( from_recs , to_recs , keys ) : "Return the set of keys which have changed." return set ( k for k in keys if sorted ( from_recs [ k ] . items ( ) ) != sorted ( to_recs [ k ] . items ( ) ) )
Return the set of keys which have changed .
19,109
def record_diff(lhs, rhs):
    """Diff an individual row.

    Returns ``{column: {'from': old, 'to': new}}`` for every column whose
    value differs. Columns present on only one side are treated as None on
    the other — the original indexed ``lhs[k]``/``rhs[k]`` directly and
    raised KeyError for asymmetric rows despite iterating the key union.
    """
    delta = {}
    for k in set(lhs).union(rhs):
        from_ = lhs.get(k)
        to_ = rhs.get(k)
        if from_ != to_:
            delta[k] = {'from': from_, 'to': to_}
    return delta
Diff an individual row .
19,110
def filter_significance(diff, significance):
    """Prune changes in the patch caused by numeric differences smaller
    than the given level of significance.

    Returns a shallow copy of ``diff`` with its 'changed' list filtered;
    deltas left with no significant fields are dropped entirely.
    """
    pruned = []
    for delta in diff['changed']:
        significant_fields = {
            field: change
            for field, change in delta['fields'].items()
            if _is_significant(change, significance)
        }
        if significant_fields:
            pruned.append({'key': delta['key'], 'fields': significant_fields})
    result = diff.copy()
    result['changed'] = pruned
    return result
Prune any changes in the patch which are due to numeric changes less than this level of significance .
19,111
def _is_significant ( change , significance ) : try : a = float ( change [ 'from' ] ) b = float ( change [ 'to' ] ) except ValueError : return True return abs ( a - b ) > 10 ** ( - significance )
Return True if a change is genuinely significant given our tolerance .
19,112
def diff_files(from_file, to_file, index_columns, sep=',', ignored_columns=None):
    """Diff two CSV files, returning the patch which transforms one into
    the other."""
    with open(from_file) as from_stream, open(to_file) as to_stream:
        from_records = records.load(from_stream, sep=sep)
        to_records = records.load(to_stream, sep=sep)
        return patch.create(from_records, to_records, index_columns,
                            ignore_columns=ignored_columns)
Diff two CSV files returning the patch which transforms one into the other .
19,113
def patch_file(patch_stream: TextIO, fromcsv_stream: TextIO,
               tocsv_stream: TextIO, strict: bool = True, sep: str = ','):
    """Apply the patch to the source CSV stream and write the result to
    the target stream."""
    diff = patch.load(patch_stream)
    from_records = records.load(fromcsv_stream, sep=sep)
    to_records = patch.apply(diff, from_records, strict=strict)
    if to_records:
        # Derive the output column order from the patched data.
        all_columns = to_records[0].keys()
        index_columns = diff['_index']
        fieldnames = _nice_fieldnames(all_columns, index_columns)
    else:
        # Nothing survived patching: fall back to the input's header.
        fieldnames = from_records.fieldnames
    records.save(to_records, fieldnames, tocsv_stream)
Apply the patch to the source CSV file and save the result to the target file .
19,114
def patch_records(diff, from_records, strict=True):
    """Apply the patch to the sequence of records, returning the
    transformed records. Thin convenience wrapper over patch.apply."""
    return patch.apply(diff, from_records, strict=strict)
Apply the patch to the sequence of records returning the transformed records .
19,115
def _nice_fieldnames ( all_columns , index_columns ) : "Indexes on the left, other fields in alphabetical order on the right." non_index_columns = set ( all_columns ) . difference ( index_columns ) return index_columns + sorted ( non_index_columns )
Indexes on the left, other fields in alphabetical order on the right.
19,116
def csvdiff_cmd(index_columns, from_csv, to_csv, style=None, output=None,
                sep=',', quiet=False, ignore_columns=None, significance=None):
    """Compare two CSV files to see what rows differ between them.

    The files are each expected to have a header row and for each row to
    be uniquely identified by one or more indexing columns.
    """
    if ignore_columns is not None:
        for col in ignore_columns:
            if col in index_columns:
                error.abort("You can't ignore an index column")
    if output:
        ostream = open(output, 'w')
    elif quiet:
        ostream = io.StringIO()  # discard output
    else:
        ostream = sys.stdout
    try:
        if style == 'summary':
            _diff_and_summarize(from_csv, to_csv, index_columns, ostream,
                                sep=sep, ignored_columns=ignore_columns,
                                significance=significance)
        else:
            compact = (style == 'compact')
            _diff_files_to_stream(from_csv, to_csv, index_columns, ostream,
                                  compact=compact, sep=sep,
                                  ignored_columns=ignore_columns,
                                  significance=significance)
    except records.InvalidKeyError as e:
        error.abort(e.args[0])
    finally:
        # Close only streams opened here — the original unconditionally
        # closed ostream, which closed sys.stdout for the default case and
        # broke any later writes in the calling process.
        if ostream is not sys.stdout:
            ostream.close()
Compare two csv files to see what rows differ between them . The files are each expected to have a header row and for each row to be uniquely identified by one or more indexing columns .
19,117
def _diff_and_summarize(from_csv, to_csv, index_columns, stream=sys.stdout,
                        sep=',', ignored_columns=None, significance=None):
    """Print a summary of the difference between the two files, then exit
    with EXIT_SAME or EXIT_DIFFERENT."""
    from_records = list(records.load(from_csv, sep=sep))
    to_records = records.load(to_csv, sep=sep)
    diff = patch.create(from_records, to_records, index_columns, ignored_columns)
    if significance is not None:
        diff = patch.filter_significance(diff, significance)
    _summarize_diff(diff, len(from_records), stream=stream)
    # Exit code signals whether the files differed.
    sys.exit(EXIT_SAME if patch.is_empty(diff) else EXIT_DIFFERENT)
Print a summary of the difference between the two files .
19,118
def csvpatch_cmd(input_csv, input=None, output=None, strict=True):
    """Apply the changes from a csvdiff patch to an existing CSV file.

    Reads the patch from ``input`` (default stdin) and writes the patched
    CSV to ``output`` (default stdout).
    """
    patch_stream = sys.stdin if input is None else open(input)
    tocsv_stream = sys.stdout if output is None else open(output, 'w')
    fromcsv_stream = open(input_csv)
    try:
        patch_file(patch_stream, fromcsv_stream, tocsv_stream, strict=strict)
    except patch.InvalidPatchError as e:
        error.abort('reading patch, {0}'.format(e.args[0]))
    finally:
        # Close only streams this function opened — the original also
        # closed sys.stdin/sys.stdout when no file arguments were given,
        # breaking any subsequent I/O in the calling process.
        if patch_stream is not sys.stdin:
            patch_stream.close()
        fromcsv_stream.close()
        if tocsv_stream is not sys.stdout:
            tocsv_stream.close()
Apply the changes from a csvdiff patch to an existing CSV file .
19,119
def sort(records: Sequence[Record]) -> List[Record]:
    """Sort records into a canonical order, suitable for comparison."""
    canonical = list(records)
    canonical.sort(key=_record_key)
    return canonical
Sort records into a canonical order suitable for comparison .
19,120
def _record_key(record: Record) -> List[Tuple[Column, str]]:
    """An orderable representation of this record (sorted item pairs)."""
    pairs = list(record.items())
    pairs.sort()
    return pairs
An orderable representation of this record .
19,121
def getargspecs(func):
    """Bridge inspect.getargspec and inspect.getfullargspec.

    Picks the proper one for the running Python version and transparently
    unwraps typechecked-/override-decorator wrappers.
    """
    if func is None:
        raise TypeError('None is not a Python function')
    # Unwrap decorator wrappers before inspecting.
    if hasattr(func, 'ch_func'):
        return getargspecs(func.ch_func)
    if hasattr(func, 'ov_func'):
        return getargspecs(func.ov_func)
    if hasattr(inspect, 'getfullargspec'):
        return inspect.getfullargspec(func)
    return inspect.getargspec(func)  # Python 2 fallback
Bridges inspect.getargspec and inspect.getfullargspec. Automatically selects the proper one depending on the current Python version. Automatically bypasses wrappers from typechecked- and override-decorators.
19,122
def get_required_kwonly_args(argspecs):
    """Return the keyword-only args of ``argspecs`` that have no default.

    Returns an empty list when no such args exist (including on Python 2
    argspecs, which have no keyword-only support at all).
    """
    try:
        kwonly = argspecs.kwonlyargs
        defaults = argspecs.kwonlydefaults
    except AttributeError:
        return []
    if defaults is None:
        return kwonly
    return [name for name in kwonly if name not in defaults]
Determines whether given argspecs implies required keywords - only args and returns them as a list . Returns empty list if no such args exist .
19,123
def getargnames(argspecs, with_unbox=False):
    """List argument names as they would appear in a function signature:
    positional args, var-args, keyword-only args, then var-keywords.

    When ``with_unbox`` is true, var-args/var-keywords are prefixed with
    '*' / '**'.
    """
    try:
        var_kw = argspecs.keywords  # Python 2 getargspec
    except AttributeError:
        var_kw = argspecs.varkw  # Python 3 getfullargspec
    try:
        kwonly = argspecs.kwonlyargs
    except AttributeError:
        kwonly = None  # Python 2: no keyword-only args
    names = []
    if argspecs.args is not None:
        names.extend(argspecs.args)
    if argspecs.varargs is not None:
        names.append('*' + argspecs.varargs if with_unbox else argspecs.varargs)
    if kwonly is not None:
        names.extend(kwonly)
    if var_kw is not None:
        names.append('**' + var_kw if with_unbox else var_kw)
    return names
Resembles list of arg - names as would be seen in a function signature including var - args var - keywords and keyword - only args .
19,124
def get_class_that_defined_method(meth):
    """Determine the class owning the given method.

    Raises ValueError if ``meth`` cannot be resolved to a method of a class.
    """
    if is_classmethod(meth):
        return meth.__self__
    if hasattr(meth, 'im_class'):
        # Python 2 bound/unbound methods carry their class directly.
        return meth.im_class
    if hasattr(meth, '__qualname__'):
        # Python 3: walk the qualname from the defining module down
        # through nested classes.
        try:
            cls_names = (meth.__qualname__
                         .split('.<locals>', 1)[0]
                         .rsplit('.', 1)[0]
                         .split('.'))
            cls = inspect.getmodule(meth)
            for cls_name in cls_names:
                cls = getattr(cls, cls_name)
            if isinstance(cls, type):
                return cls
        except AttributeError:
            pass
    raise ValueError(str(meth) + ' is not a method.')
Determines the class owning the given method .
19,125
def is_classmethod(meth):
    """Detect whether the given callable is a classmethod.

    A classmethod accessed through its class is a bound method whose
    ``__self__`` is the class itself and which equals the attribute of
    that name on the class.
    """
    if inspect.ismethoddescriptor(meth):
        return isinstance(meth, classmethod)
    return (inspect.ismethod(meth)
            and inspect.isclass(meth.__self__)
            and hasattr(meth.__self__, meth.__name__)
            and meth == getattr(meth.__self__, meth.__name__))
Detects if the given callable is a classmethod .
19,126
def get_current_args(caller_level=0, func=None, argNames=None):
    """Determine the args of the current function call.

    Use ``caller_level > 0`` to get args of earlier function calls in the
    current stack. Fix vs. the original: ``func`` is resolved (and a
    property unwrapped) *before* ``argNames`` is derived from it — the
    original computed ``argNames`` first, so calling with both parameters
    left as None crashed inside ``getargspecs(None)``.
    """
    if func is None:
        func = get_current_function(caller_level + 1)
    if isinstance(func, property):
        # Prefer the setter when one exists, matching the original logic.
        func = func.fget if func.fset is None else func.fset
    if argNames is None:
        argNames = getargnames(getargspecs(func))
    stck = inspect.stack()
    lcs = stck[1 + caller_level][0].f_locals
    return tuple(lcs[name] for name in argNames)
Determines the args of current function call . Use caller_level > 0 to get args of even earlier function calls in current stack .
19,127
def getmodule(code):
    """More robust variant of inspect.getmodule (e.g. fewer issues on Jython)."""
    try:
        md = inspect.getmodule(code, code.co_filename)
    except AttributeError:
        # Not a code object — defer entirely to stock inspect.
        return inspect.getmodule(code)
    if md is None:
        # Jython: resolve __pyclasspath__ and $py.class artifacts to
        # real filesystem paths and retry.
        cfname = code.co_filename.replace(
            '__pyclasspath__',
            os.path.realpath('') + os.sep + '__pyclasspath__')
        cfname = cfname.replace('$py.class', '.py')
        md = inspect.getmodule(code, cfname)
        if md is None:
            md = inspect.getmodule(code)
    return md
More robust variant of inspect . getmodule . E . g . has less issues on Jython .
19,128
def _calc_traceback_limit(tb):
    """Calculate the limit parameter that strips pytypes-internal frames
    when ``tb`` is rendered with the traceback module."""
    limit = 1
    tb2 = tb
    while tb2.tb_next is not None:
        try:
            # Second-to-last path component identifies the package directory.
            maybe_pytypes = tb2.tb_next.tb_frame.f_code.co_filename.split(os.sep)[-2]
        except IndexError:
            maybe_pytypes = None
        # NOTE(review): the attribute below was garbled in the source dump;
        # '_pytypes_import__' is a best-effort reconstruction — verify
        # against pytypes.typechecker.
        if (maybe_pytypes == 'pytypes' and
                not tb2.tb_next.tb_frame.f_code ==
                pytypes.typechecker._pytypes_import__.__code__):
            break
        else:
            limit += 1
            tb2 = tb2.tb_next
    return limit
Calculates limit - parameter to strip away pytypes internals when used with API from traceback module .
19,129
def _pytypes_excepthook(exctype, value, tb):
    """sys.excepthook replacement that strips pytypes-internal frames from
    TypeError tracebacks when pytypes.clean_traceback is enabled.

    Falls through to the previously installed hook (or the default one)
    for everything else.
    """
    if pytypes.clean_traceback and issubclass(exctype, TypeError):
        traceback.print_exception(exctype, value, tb, _calc_traceback_limit(tb))
    elif _sys_excepthook is None:
        sys.__excepthook__(exctype, value, tb)
    else:
        _sys_excepthook(exctype, value, tb)
An excepthook suitable for use as sys . excepthook that strips away the part of the traceback belonging to pytypes internals . Can be switched on and off via pytypes . clean_traceback or pytypes . set_clean_traceback . The latter automatically installs this hook in sys . excepthook .
19,130
def get_generator_type(genr):
    """Obtain the PEP 484 style type of a generator object, i.e. a
    typing.Generator object."""
    if genr in _checked_generator_types:
        return _checked_generator_types[genr]
    frame = genr.gi_frame
    if frame is not None and 'gen_type' in frame.f_locals:
        # A typechecking wrapper already recorded the type in its locals.
        return frame.f_locals['gen_type']
    cllble, nesting, slf = util.get_callable_fq_for_code(genr.gi_code)
    if cllble is None:
        return Generator
    return _funcsigtypes(cllble, slf, nesting[-1] if slf else None,
                         frame.f_globals if frame is not None else None)[1]
Obtains PEP 484 style type of a generator object i . e . returns a typing . Generator object .
19,131
def get_Generic_parameters(tp, generic_supertype):
    """Retrieve the type values from ``tp`` that correspond to the
    parameters defined by ``generic_supertype``.

    ``tp`` must be a subclass of ``generic_supertype``; raises TypeError
    otherwise.
    """
    try:
        res = _select_Generic_superclass_parameters(tp, generic_supertype)
    except TypeError:
        res = None
    if res is None:
        raise TypeError("%s has no proper parameters defined by %s." %
                        (type_str(tp), type_str(generic_supertype)))
    return tuple(res)
tp must be a subclass of generic_supertype . Retrieves the type values from tp that correspond to parameters defined by generic_supertype .
19,132
def get_Tuple_params(tpl):
    """Python-version-independent access to the parameters of a
    typing.Tuple object.

    Omits a trailing Ellipsis argument (use is_Tuple_ellipsis for that).
    Returns None for non-Tuple inputs.
    """
    try:
        return tpl.__tuple_params__  # very old typing versions
    except AttributeError:
        try:
            args = tpl.__args__
            if args is None:
                return None
            if args[0] == ():
                return ()  # Tuple[()] — the empty tuple type
            if args[-1] is Ellipsis:
                # Drop the Ellipsis marker; bare Tuple[...] has no params.
                return args[:-1] if len(args) > 1 else None
            return args
        except AttributeError:
            return None
Python version independent function to obtain the parameters of a typing . Tuple object . Omits the ellipsis argument if present . Use is_Tuple_ellipsis for that . Tested with CPython 2 . 7 3 . 5 3 . 6 and Jython 2 . 7 . 1 .
19,133
def is_Tuple_ellipsis(tpl):
    """Python-version-independent check whether a typing.Tuple object
    ends with an Ellipsis (variadic homogeneous tuple)."""
    try:
        return tpl.__tuple_use_ellipsis__  # very old typing versions
    except AttributeError:
        try:
            if tpl.__args__ is None:
                return False
            if tpl.__args__[-1] is Ellipsis:
                return True
        except AttributeError:
            pass
        return False
Python version independent function to check if a typing . Tuple object contains an ellipsis .
19,134
def is_Union(tp):
    """Python-version-independent check whether ``tp`` is typing.Union
    (bare or subscripted)."""
    if tp is Union:
        return True
    try:
        # Modern typing: subscripted unions expose Union as __origin__.
        return tp.__origin__ is Union
    except AttributeError:
        try:
            # Very old typing exposed a UnionMeta metaclass instead.
            return isinstance(tp, typing.UnionMeta)
        except AttributeError:
            return False
Python version independent check if a type is typing . Union . Tested with CPython 2 . 7 3 . 5 3 . 6 and Jython 2 . 7 . 1 .
19,135
def is_builtin_type(tp):
    """Check whether ``tp`` is a builtin type (int, list, dict, ...).

    Fix: the original probed ``__builtins__``, which is the builtins
    *module* only in ``__main__`` — in any imported module it is a plain
    dict, so hasattr/getattr probing on it silently returned False.
    Using the ``builtins`` module works everywhere.
    """
    try:
        import builtins
    except ImportError:  # pragma: no cover - Python 2 fallback
        import __builtin__ as builtins
    name = getattr(tp, '__name__', None)
    return name is not None and getattr(builtins, name, None) is tp
Checks if the given type is a builtin one .
19,136
def get_types(func):
    """Like get_type_hints, but returns types as a sequence rather than a
    dictionary, in declaration order of the corresponding arguments."""
    clsm = util.is_classmethod(func)
    slf = util.is_method(func)
    return _get_types(func, clsm, slf)
Works like get_type_hints but returns types as a sequence rather than a dictionary . Types are returned in declaration order of the corresponding arguments .
19,137
def get_member_types(obj, member_name, prop_getter=False):
    """Experimental variant of get_types that is also applicable to
    descriptors. Still incomplete and hardly tested."""
    cls = obj.__class__
    member = getattr(cls, member_name)
    clsm = isinstance(member, classmethod)
    stat = isinstance(member, staticmethod)
    # Plain methods (neither static nor class) take self.
    slf = not (stat or clsm)
    return _get_types(member, clsm, slf, cls, prop_getter)
Still experimental incomplete and hardly tested . Works like get_types but is also applicable to descriptors .
19,138
def _get_types(func, clsm, slf, clss=None, prop_getter=False,
               unspecified_type=Any, infer_defaults=None):
    """Helper for get_types and get_member_types.

    Returns (args_type, result_type) after stub matching.
    """
    func0 = util._actualfunc(func, prop_getter)
    if not slf:
        argNames = util.getargnames(util.getargspecs(func0))
        if len(argNames) > 0 and clsm and argNames[0] != 'cls':
            util._warn_argname('classmethod using non-idiomatic cls argname',
                               func0, slf, clsm, clss)
    if clss is None and (slf or clsm):
        if slf:
            assert util.is_method(func) or isinstance(func, property)
        if clsm:
            assert util.is_classmethod(func)
        clss = util.get_class_that_defined_method(func)
        assert hasattr(clss, func.__name__)
    args, res = _funcsigtypes(func, slf or clsm, clss, None, prop_getter,
                              unspecified_type=unspecified_type,
                              infer_defaults=infer_defaults)
    return _match_stub_type(args), _match_stub_type(res)
Helper for get_types and get_member_types .
19,139
def _get_type_hints(func, args=None, res=None, infer_defaults=None):
    """Helper for get_type_hints: build a PEP 484 hints dict from the
    (args, result) types of ``func``."""
    if args is None or res is None:
        # type(NotImplemented) marks arguments with no declared type.
        args2, res2 = _get_types(func, util.is_classmethod(func),
                                 util.is_method(func),
                                 unspecified_type=type(NotImplemented),
                                 infer_defaults=infer_defaults)
        if args is None:
            args = args2
        if res is None:
            res = res2
    slf = 1 if util.is_method(func) else 0
    argNames = util.getargnames(util.getargspecs(util._actualfunc(func)))
    result = {}
    if args is not Any:
        prms = get_Tuple_params(args)
        for i in range(slf, len(argNames)):
            # Skip arguments whose type was left unspecified.
            if prms[i - slf] is not type(NotImplemented):
                result[argNames[i]] = prms[i - slf]
    result['return'] = res
    return result
Helper for get_type_hints .
19,140
def _issubclass_Mapping_covariant(subclass, superclass, bound_Generic,
                                  bound_typevars, bound_typevars_readonly,
                                  follow_fwd_refs, _recursion_check):
    """Helper for _issubclass (pytypes.issubtype).

    This subclass check treats Mapping values as covariant.
    """
    if not is_Generic(subclass):
        return issubclass(subclass, superclass)
    if subclass.__origin__ is None or not issubclass(subclass.__origin__, Mapping):
        # Not a Mapping after all — fall back to the generic check.
        return _issubclass_Generic(subclass, superclass, bound_Generic,
                                   bound_typevars, bound_typevars_readonly,
                                   follow_fwd_refs, _recursion_check)
    if superclass.__args__ is None:
        if not pytypes.check_unbound_types:
            raise TypeError("Attempted to check unbound mapping type(superclass): "
                            + str(superclass))
        if pytypes.strict_unknown_check:
            return False
        super_args = (Any, Any)
    else:
        super_args = superclass.__args__
    if subclass.__args__ is None:
        if not pytypes.check_unbound_types:
            raise TypeError("Attempted to check unbound mapping type(subclass): "
                            + str(subclass))
        if pytypes.strict_unknown_check:
            return False
        sub_args = (Any, Any)
    else:
        sub_args = subclass.__args__
    # Key and value parameters must both be compatible (values covariant).
    key_ok = _issubclass(sub_args[0], super_args[0], bound_Generic,
                         bound_typevars, bound_typevars_readonly,
                         follow_fwd_refs, _recursion_check)
    if not key_ok:
        return False
    return _issubclass(sub_args[1], super_args[1], bound_Generic,
                       bound_typevars, bound_typevars_readonly,
                       follow_fwd_refs, _recursion_check)
Helper for _issubclass a . k . a pytypes . issubtype . This subclass - check treats Mapping - values as covariant .
19,141
def _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars,
                          bound_typevars_readonly, follow_fwd_refs,
                          _recursion_check):
    """Helper for _issubclass_Union: recursive union compatibility check."""
    super_args = get_Union_params(superclass)
    if super_args is None:
        # Unparameterized Union only matches another Union.
        return is_Union(subclass)
    if is_Union(subclass):
        sub_args = get_Union_params(subclass)
        if sub_args is None:
            return False
        # Every member of the sub-union must fit the super-union.
        return all(_issubclass(member, superclass, bound_Generic,
                               bound_typevars, bound_typevars_readonly,
                               follow_fwd_refs, _recursion_check)
                   for member in sub_args)
    if isinstance(subclass, TypeVar):
        if subclass in super_args:
            return True
        if subclass.__constraints__:
            return _issubclass(Union[subclass.__constraints__], superclass,
                               bound_Generic, bound_typevars,
                               bound_typevars_readonly, follow_fwd_refs,
                               _recursion_check)
        return False
    # Plain type: it suffices to match any alternative of the super-union.
    return any(_issubclass(subclass, alt, bound_Generic, bound_typevars,
                           bound_typevars_readonly, follow_fwd_refs,
                           _recursion_check)
               for alt in super_args)
Helper for _issubclass_Union .
19,142
def _isinstance(obj, cls, bound_Generic=None, bound_typevars=None,
                bound_typevars_readonly=False, follow_fwd_refs=True,
                _recursion_check=None):
    """Access this via pytypes.is_of_type.

    Works like isinstance, but supports PEP 484 style types from the
    typing module.
    """
    if bound_typevars is None:
        bound_typevars = {}
    # typing.Iterable is handled specially: sample the runtime item type.
    if is_Generic(cls) and cls.__origin__ is typing.Iterable:
        if not is_iterable(obj):
            return False
        itp = get_iterable_itemtype(obj)
        if itp is None:
            return not pytypes.check_iterables
        return _issubclass(itp, cls.__args__[0], bound_Generic, bound_typevars,
                           bound_typevars_readonly, follow_fwd_refs,
                           _recursion_check)
    if is_Callable(cls):
        return _isinstance_Callable(obj, cls, bound_Generic, bound_typevars,
                                    bound_typevars_readonly, follow_fwd_refs,
                                    _recursion_check)
    # General case: deep-type the object and fall back to subtype checking.
    return _issubclass(deep_type(obj), cls, bound_Generic, bound_typevars,
                       bound_typevars_readonly, follow_fwd_refs,
                       _recursion_check)
Access this via pytypes . is_of_type . Works like isinstance but supports PEP 484 style types from typing module .
19,143
def generator_checker_py3(gen, gen_type, bound_Generic, bound_typevars,
                          bound_typevars_readonly, follow_fwd_refs,
                          _recursion_check):
    """Build a typechecking wrapper around a Python 3 style generator.

    gen_type.__args__ holds (yield type, send type, return type); each is
    checked on the fly unless declared as Any.
    """
    initialized = False
    sn = None
    try:
        while True:
            a = gen.send(sn)
            # The priming next() legitimately yields before any send.
            if initialized or a is not None:
                if (gen_type.__args__[0] is not Any and
                        not _isinstance(a, gen_type.__args__[0], bound_Generic,
                                        bound_typevars, bound_typevars_readonly,
                                        follow_fwd_refs, _recursion_check)):
                    tpa = deep_type(a)
                    msg = _make_generator_error_message(
                        deep_type(a), gen, gen_type.__args__[0],
                        'has incompatible yield type')
                    _raise_typecheck_error(msg, True, a, tpa,
                                           gen_type.__args__[0])
            initialized = True
            sn = yield a
            if (gen_type.__args__[1] is not Any and
                    not _isinstance(sn, gen_type.__args__[1], bound_Generic,
                                    bound_typevars, bound_typevars_readonly,
                                    follow_fwd_refs, _recursion_check)):
                tpsn = deep_type(sn)
                msg = _make_generator_error_message(
                    tpsn, gen, gen_type.__args__[1],
                    'has incompatible send type')
                _raise_typecheck_error(msg, False, sn, tpsn,
                                       gen_type.__args__[1])
    except StopIteration as st:
        # Check the generator's return value carried by StopIteration.
        if (gen_type.__args__[2] is not Any and
                not _isinstance(st.value, gen_type.__args__[2], bound_Generic,
                                bound_typevars, bound_typevars_readonly,
                                follow_fwd_refs, _recursion_check)):
            tpst = deep_type(st.value)
            msg = _make_generator_error_message(
                tpst, gen, gen_type.__args__[2],
                'has incompatible return type')
            _raise_typecheck_error(msg, True, st.value, tpst,
                                   gen_type.__args__[2])
        raise st
Builds a typechecking wrapper around a Python 3 style generator object .
19,144
def generator_checker_py2(gen, gen_type, bound_Generic, bound_typevars,
                          bound_typevars_readonly, follow_fwd_refs,
                          _recursion_check):
    """Build a typechecking wrapper around a Python 2 style generator.

    Unlike the py3 variant there is no return value to check, so
    StopIteration simply propagates.
    """
    initialized = False
    sn = None
    while True:
        a = gen.send(sn)
        # The priming next() legitimately yields before any send.
        if initialized or a is not None:
            if (gen_type.__args__[0] is not Any and
                    not _isinstance(a, gen_type.__args__[0], bound_Generic,
                                    bound_typevars, bound_typevars_readonly,
                                    follow_fwd_refs, _recursion_check)):
                tpa = deep_type(a)
                msg = _make_generator_error_message(
                    tpa, gen, gen_type.__args__[0],
                    'has incompatible yield type')
                _raise_typecheck_error(msg, True, a, tpa, gen_type.__args__[0])
        initialized = True
        sn = yield a
        if (gen_type.__args__[1] is not Any and
                not _isinstance(sn, gen_type.__args__[1], bound_Generic,
                                bound_typevars, bound_typevars_readonly,
                                follow_fwd_refs, _recursion_check)):
            tpsn = deep_type(sn)
            msg = _make_generator_error_message(
                tpsn, gen, gen_type.__args__[1],
                'has incompatible send type')
            _raise_typecheck_error(msg, False, sn, tpsn, gen_type.__args__[1])
Builds a typechecking wrapper around a Python 2 style generator object .
19,145
def annotations_func(func):
    """Works like annotations, but is only applicable to functions,
    methods and properties."""
    if not has_type_hints(func):
        # Ensure the attribute exists even for hint-less callables.
        func.__annotations__ = {}
    func.__annotations__ = _get_type_hints(func, infer_defaults=False)
    return func
Works like annotations but is only applicable to functions methods and properties .
19,146
def annotations_class(cls):
    """Works like annotations, but is only applicable to classes.

    Recurses into nested classes.
    """
    assert isclass(cls)
    # Snapshot the keys: annotating members may touch __dict__.
    for key in list(cls.__dict__):
        memb = cls.__dict__[key]
        if _check_as_func(memb):
            annotations_func(memb)
        elif isclass(memb):
            annotations_class(memb)
    return cls
Works like annotations but is only applicable to classes .
19,147
def dump_cache(path=None, python2=False, suffix=None):
    """Write cached type observations grouped by module.

    ``suffix`` defaults to 'pyi' ('pyi2' in python2 mode) and ``path``
    to pytypes.default_typelogger_path.
    """
    typelogging_enabled_tmp = pytypes.typelogging_enabled
    # Suspend logging so that dumping does not log itself.
    pytypes.typelogging_enabled = False
    if suffix is None:
        suffix = 'pyi2' if python2 else 'pyi'
    if path is None:
        path = pytypes.default_typelogger_path
    modules = {}
    for key in _member_cache:
        node = _member_cache[key]
        mname = node.get_modulename()
        if mname not in modules:
            mnode = _module_node(mname)
            modules[mname] = mnode
        else:
            mnode = modules[mname]
        mnode.append(node)
    for module in modules:
        _dump_module(modules[module], path, python2, suffix)
    pytypes.typelogging_enabled = typelogging_enabled_tmp
Writes cached type observations, grouped by module, to stub files at the given path.
19,148
def get_indentation(func):
    """Extract a function's indentation as a string.

    In contrast to an inspect.indentsize-based implementation this
    preserves tabs if present. Falls back to pytypes.default_indent when
    no body line is found.
    """
    src_lines = getsourcelines(func)[0]
    for line in src_lines:
        stripped = line.lstrip()
        # Skip decorators, the def line itself and pure comment lines.
        if not (line.startswith('@') or line.startswith('def') or
                stripped.startswith('#')):
            return line[:len(line) - len(stripped)]
    return pytypes.default_indent
Extracts a function's indentation as a string. In contrast to an inspect.indentsize-based implementation, this function preserves tabs if present.
19,149
def typelogged_func(func):
    """Works like typelogged, but is only applicable to functions,
    methods and properties."""
    if not pytypes.typelogging_enabled:
        return func
    if hasattr(func, 'do_logging'):
        # Already wrapped — just switch logging on.
        func.do_logging = True
        return func
    if hasattr(func, 'do_typecheck'):
        # A typechecking wrapper exists; extend it with logging.
        return _typeinspect_func(func, func.do_typecheck, True)
    return _typeinspect_func(func, False, True)
Works like typelogged but is only applicable to functions methods and properties .
19,150
def typelogged_class(cls):
    """Works like typelogged, but is only applicable to classes.

    Recurses into nested classes.
    """
    if not pytypes.typelogging_enabled:
        return cls
    assert isclass(cls)
    # Snapshot the keys: wrapping members mutates __dict__.
    for key in list(cls.__dict__):
        memb = cls.__dict__[key]
        if _check_as_func(memb):
            setattr(cls, key, typelogged_func(memb))
        elif isclass(memb):
            typelogged_class(memb)
    return cls
Works like typelogged but is only applicable to classes .
19,151
def typelogged_module(md):
    """Works like typelogged, but is applicable to modules (by explicit
    call). ``md`` must be a module or a module name in sys.modules."""
    if not pytypes.typelogging_enabled:
        return md
    if isinstance(md, str):
        if md in sys.modules:
            md = sys.modules[md]
            if md is None:
                return md
        elif md in pytypes.typechecker._pending_modules:
            # Module is still being imported; defer until it completes.
            pytypes.typechecker._pending_modules[md].append(typelogged_module)
            return md
    assert ismodule(md)
    if md.__name__ in pytypes.typechecker._pending_modules:
        pytypes.typechecker._pending_modules[md.__name__].append(typelogged_module)
        assert ismodule(md)
    if (md.__name__ in _fully_typelogged_modules and
            _fully_typelogged_modules[md.__name__] == len(md.__dict__)):
        # Already fully processed and unchanged since.
        return md
    for key in list(md.__dict__):
        memb = md.__dict__[key]
        # Only wrap members actually defined in this module.
        if _check_as_func(memb) and memb.__module__ == md.__name__:
            setattr(md, key, typelogged_func(memb))
        elif isclass(memb) and memb.__module__ == md.__name__:
            typelogged_class(memb)
    if md.__name__ not in pytypes.typechecker._pending_modules:
        _fully_typelogged_modules[md.__name__] = len(md.__dict__)
    return md
Works like typelogged, but is only applicable to modules (by explicit call). md must be a module or a module name contained in sys.modules.
19,152
def enable_global_typechecked_decorator(flag=True, retrospective=True):
    """Enable or disable global typechecking mode via decorators.

    See flag global_typechecked_decorator. If ``retrospective`` is true,
    already imported modules are processed too, not only future imports.
    Does not work if checking_enabled is false.
    """
    global global_typechecked_decorator
    global_typechecked_decorator = flag
    if import_hook_enabled:
        _install_import_hook()
    if global_typechecked_decorator and retrospective:
        _catch_up_global_typechecked_decorator()
    return global_typechecked_decorator
Enables or disables global typechecking mode via decorators . See flag global_typechecked_decorator . In contrast to setting the flag directly this function provides a retrospective option . If retrospective is true this will also affect already imported modules not only future imports . Does not work if checking_enabled is false . Does not work reliably if checking_enabled has ever been set to false during current run .
19,153
def enable_global_auto_override_decorator(flag=True, retrospective=True):
    """Enable or disable global auto_override mode via decorators.

    See flag global_auto_override_decorator. If ``retrospective`` is true,
    already imported modules are processed too, not only future imports.
    """
    global global_auto_override_decorator
    global_auto_override_decorator = flag
    if import_hook_enabled:
        _install_import_hook()
    if global_auto_override_decorator and retrospective:
        _catch_up_global_auto_override_decorator()
    return global_auto_override_decorator
Enables or disables global auto_override mode via decorators . See flag global_auto_override_decorator . In contrast to setting the flag directly this function provides a retrospective option . If retrospective is true this will also affect already imported modules not only future imports .
19,154
def enable_global_annotations_decorator(flag=True, retrospective=True):
    """Enable or disable global annotation mode via decorators.

    See flag global_annotations_decorator. If ``retrospective`` is true,
    already imported modules are processed too, not only future imports.
    """
    global global_annotations_decorator
    global_annotations_decorator = flag
    if import_hook_enabled:
        _install_import_hook()
    if global_annotations_decorator and retrospective:
        _catch_up_global_annotations_decorator()
    return global_annotations_decorator
Enables or disables global annotation mode via decorators . See flag global_annotations_decorator . In contrast to setting the flag directly this function provides a retrospective option . If retrospective is true this will also affect already imported modules not only future imports .
19,155
def enable_global_typelogged_decorator(flag=True, retrospective=True):
    """Enable or disable global typelog mode via decorators.

    See flag global_typelogged_decorator. If ``retrospective`` is true,
    already imported modules are processed too, not only future imports.
    """
    global global_typelogged_decorator
    global_typelogged_decorator = flag
    if import_hook_enabled:
        _install_import_hook()
    if global_typelogged_decorator and retrospective:
        _catch_up_global_typelogged_decorator()
    return global_typelogged_decorator
Enables or disables global typelog mode via decorators . See flag global_typelogged_decorator . In contrast to setting the flag directly this function provides a retrospective option . If retrospective is true this will also affect already imported modules not only future imports .
19,156
def enable_global_typechecked_profiler(flag=True):
    """Enable or disable global typechecking mode via a profiler.

    See flag ``global_typechecked_profiler``. Has no effect if
    ``checking_enabled`` is false.

    :param flag: True to start (or reuse) the global type agent, False to
        stop it (unless the typelogging profiler still needs it)
    """
    global global_typechecked_profiler, _global_type_agent, global_typelogged_profiler
    global_typechecked_profiler = flag
    if flag and checking_enabled:
        # Lazily create the shared agent, then (re)start it if necessary.
        if _global_type_agent is None:
            _global_type_agent = TypeAgent()
            _global_type_agent.start()
        elif not _global_type_agent.active:
            _global_type_agent.start()
    # Only stop the shared agent when the typelogging profiler mode does
    # not still depend on it.
    elif not flag and not global_typelogged_profiler and not _global_type_agent is None and _global_type_agent.active:
        _global_type_agent.stop()
Enables or disables global typechecking mode via a profiler . See flag global_typechecked_profiler . Does not work if checking_enabled is false .
19,157
def enable_global_typelogged_profiler(flag=True):
    """Enable or disable global typelogging mode via a profiler.

    See flag ``global_typelogged_profiler``. Has no effect if
    ``typelogging_enabled`` is false.

    :param flag: True to start (or reuse) the global type agent, False to
        stop it (unless the typechecking profiler still needs it)
    """
    global global_typelogged_profiler, _global_type_agent, global_typechecked_profiler
    global_typelogged_profiler = flag
    if flag and typelogging_enabled:
        # Lazily create the shared agent, then (re)start it if necessary.
        if _global_type_agent is None:
            _global_type_agent = TypeAgent()
            _global_type_agent.start()
        elif not _global_type_agent.active:
            _global_type_agent.start()
    # Only stop the shared agent when the typechecking profiler mode does
    # not still depend on it.
    elif not flag and not global_typechecked_profiler and not _global_type_agent is None and _global_type_agent.active:
        _global_type_agent.stop()
Enables or disables global typelogging mode via a profiler . See flag global_typelogged_profiler . Does not work if typelogging_enabled is false .
19,158
def typechecked_func(func, force=False, argType=None, resType=None, prop_getter=False):
    """Works like typechecked, but is only applicable to functions, methods
    and properties.

    :param func: the function/method/property getter to wrap
    :param force: apply even if the object is marked no-type-check
    :param argType: optional explicit argument type (overrides hints)
    :param resType: optional explicit result type (overrides hints)
    :param prop_getter: True when wrapping a property getter
    :return: the (possibly wrapped) callable
    """
    if not pytypes.checking_enabled and not pytypes.do_logging_in_typechecked:
        return func
    assert(_check_as_func(func))
    if not force and is_no_type_check(func):
        return func
    if hasattr(func, 'do_typecheck'):
        # Already wrapped by this machinery: just (re)enable typechecking.
        func.do_typecheck = True
        return func
    elif hasattr(func, 'do_logging'):
        # Wrapped as a logger: rewrap with typechecking on, keeping its
        # current logging flag.
        return _typeinspect_func(func, True, func.do_logging, argType, resType, prop_getter)
    else:
        return _typeinspect_func(func, True, False, argType, resType, prop_getter)
Works like typechecked but is only applicable to functions methods and properties .
19,159
def typechecked_class(cls, force=False, force_recursive=False):
    """Works like typechecked, but is only applicable to classes.

    :param cls: the class to process
    :param force: apply even if marked no-type-check
    :param force_recursive: force application to nested members
    :return: the processed class
    """
    # The fresh set tracks classes already visited during recursion.
    return _typechecked_class(cls, set(), force, force_recursive)
Works like typechecked but is only applicable to classes .
19,160
def auto_override_class(cls, force=False, force_recursive=False):
    """Works like auto_override, but is only applicable to classes.

    :param cls: the class whose overriding methods get the override decorator
    :param force: apply even if marked no-type-check
    :param force_recursive: force application to members and nested classes
    :return: the processed class
    """
    if not pytypes.checking_enabled:
        return cls
    assert(isclass(cls))
    if not force and is_no_type_check(cls):
        return cls
    # Snapshot the keys first: setattr below mutates cls.__dict__ while
    # we are iterating over its members.
    keys = [key for key in cls.__dict__]
    for key in keys:
        memb = cls.__dict__[key]
        if force_recursive or not is_no_type_check(memb):
            if isfunction(memb) or ismethod(memb) or ismethoddescriptor(memb):
                # Only decorate methods that actually override a base-class
                # method.
                if util._has_base_method(memb, cls):
                    setattr(cls, key, override(memb))
            elif isclass(memb):
                # NOTE(review): force_recursive is passed as ``force`` for
                # nested classes as well — presumably intentional (recursion
                # should not re-check no-type-check marks); confirm.
                auto_override_class(memb, force_recursive, force_recursive)
    return cls
Works like auto_override but is only applicable to classes .
19,161
def is_no_type_check(memb):
    """Check whether an object is marked as excluded from typechecking.

    An object is excluded if it carries a truthy ``__no_type_check__``
    attribute or is registered in the ``_not_type_checked`` set.
    """
    try:
        flagged = hasattr(memb, '__no_type_check__') and memb.__no_type_check__
        return flagged or memb in _not_type_checked
    except TypeError:
        # Unhashable objects cannot be members of the registry.
        return False
Checks if an object was annotated with
19,162
def check_argument_types(cllable=None, call_args=None, clss=None, caller_level=0):
    """Apply typechecking, from within a function or method, to the
    arguments that were passed in by the caller.

    Checking is applied w.r.t. type hints of the function or method hosting
    the call to check_argument_types.

    :param cllable: optional explicit callable to check against
    :param call_args: optional explicit argument values
    :param clss: optional class hosting the callable
    :param caller_level: extra stack frames to skip
    """
    # +1 accounts for this wrapper frame itself.
    return _check_caller_type(False, cllable, call_args, clss, caller_level + 1)
Can be called from within a function or method to apply typechecking to the arguments that were passed in by the caller . Checking is applied w . r . t . type hints of the function or method hosting the call to check_argument_types .
19,163
def check_return_type(value, cllable=None, clss=None, caller_level=0):
    """Apply typechecking, from within a function or method, to the value
    that is going to be returned.

    Checking is applied w.r.t. type hints of the function or method hosting
    the call to check_return_type.

    :param value: the value about to be returned
    :param cllable: optional explicit callable to check against
    :param clss: optional class hosting the callable
    :param caller_level: extra stack frames to skip
    """
    # +1 accounts for this wrapper frame itself.
    return _check_caller_type(True, cllable, value, clss, caller_level + 1)
Can be called from within a function or method to apply typechecking to the value that is going to be returned . Checking is applied w . r . t . type hints of the function or method hosting the call to check_return_type .
19,164
def load_diagram_from_csv(filepath, bpmn_diagram):
    """Read a CSV file from the given filepath and map it into the inner
    representation of a BPMN diagram (populates ``bpmn_diagram`` in place).

    :param filepath: path to the CSV file
    :param bpmn_diagram: BPMNDiagramGraph instance to populate
    """
    sequence_flows = bpmn_diagram.sequence_flows
    process_elements_dict = bpmn_diagram.process_elements
    diagram_attributes = bpmn_diagram.diagram_attributes
    plane_attributes = bpmn_diagram.plane_attributes
    process_dict = BpmnDiagramGraphCSVImport.import_csv_file_as_dict(filepath)
    # Fill the diagram/process/plane dictionaries, then build the nodes.
    BpmnDiagramGraphCSVImport.populate_diagram_elements_dict(diagram_attributes)
    BpmnDiagramGraphCSVImport.populate_process_elements_dict(process_elements_dict, process_dict)
    BpmnDiagramGraphCSVImport.populate_plane_elements_dict(plane_attributes)
    BpmnDiagramGraphCSVImport.import_nodes(process_dict, bpmn_diagram, sequence_flows)
    BpmnDiagramGraphCSVImport.representation_adjustment(process_dict, bpmn_diagram, sequence_flows)
Reads a CSV file from the given filepath and maps it into the inner representation of a BPMN diagram. Returns an instance of the BPMNDiagramGraph class.
19,165
def load_diagram_from_xml(filepath, bpmn_diagram):
    """Read an XML file from the given filepath and map it into the inner
    representation of a BPMN diagram (populates ``bpmn_diagram`` in place).

    :param filepath: path to the BPMN 2.0 XML file
    :param bpmn_diagram: BPMNDiagramGraph instance to populate
    """
    diagram_graph = bpmn_diagram.diagram_graph
    sequence_flows = bpmn_diagram.sequence_flows
    process_elements_dict = bpmn_diagram.process_elements
    diagram_attributes = bpmn_diagram.diagram_attributes
    plane_attributes = bpmn_diagram.plane_attributes
    collaboration = bpmn_diagram.collaboration
    document = BpmnDiagramGraphImport.read_xml_file(filepath)
    # Only the first BPMNDiagram/BPMNPlane pair is considered.
    diagram_element = document.getElementsByTagNameNS("*", "BPMNDiagram")[0]
    plane_element = diagram_element.getElementsByTagNameNS("*", "BPMNPlane")[0]
    BpmnDiagramGraphImport.import_diagram_and_plane_attributes(diagram_attributes, plane_attributes, diagram_element, plane_element)
    BpmnDiagramGraphImport.import_process_elements(document, diagram_graph, sequence_flows, process_elements_dict, plane_element)
    collaboration_element_list = document.getElementsByTagNameNS("*", consts.Consts.collaboration)
    if collaboration_element_list is not None and len(collaboration_element_list) > 0:
        # Only a single collaboration element is supported.
        collaboration_element = collaboration_element_list[0]
        BpmnDiagramGraphImport.import_collaboration_element(diagram_graph, collaboration_element, collaboration)
    if consts.Consts.message_flows in collaboration:
        message_flows = collaboration[consts.Consts.message_flows]
    else:
        message_flows = {}
    participants = []
    if consts.Consts.participants in collaboration:
        participants = collaboration[consts.Consts.participants]
    # Second pass: import the diagram-interchange (DI) layout data —
    # shapes and edges attached to the plane.
    for element in utils.BpmnImportUtils.iterate_elements(plane_element):
        if element.nodeType != element.TEXT_NODE:
            tag_name = utils.BpmnImportUtils.remove_namespace_from_tag_name(element.tagName)
            if tag_name == consts.Consts.bpmn_shape:
                BpmnDiagramGraphImport.import_shape_di(participants, diagram_graph, element)
            elif tag_name == consts.Consts.bpmn_edge:
                BpmnDiagramGraphImport.import_flow_di(diagram_graph, sequence_flows, message_flows, element)
Reads an XML file from given filepath and maps it into inner representation of BPMN diagram . Returns an instance of BPMNDiagramGraph class .
19,166
def import_collaboration_element(diagram_graph, collaboration_element, collaboration_dict):
    """Import information from a collaboration element into
    ``collaboration_dict`` (participants and message flows).

    :param diagram_graph: graph that receives participant nodes
    :param collaboration_element: XML 'collaboration' element
    :param collaboration_dict: dictionary to populate in place
    """
    collaboration_dict[consts.Consts.id] = collaboration_element.getAttribute(consts.Consts.id)
    collaboration_dict[consts.Consts.participants] = {}
    participants_dict = collaboration_dict[consts.Consts.participants]
    collaboration_dict[consts.Consts.message_flows] = {}
    message_flows_dict = collaboration_dict[consts.Consts.message_flows]
    for element in utils.BpmnImportUtils.iterate_elements(collaboration_element):
        if element.nodeType != element.TEXT_NODE:
            tag_name = utils.BpmnImportUtils.remove_namespace_from_tag_name(element.tagName)
            if tag_name == consts.Consts.participant:
                BpmnDiagramGraphImport.import_participant_element(diagram_graph, participants_dict, element)
            elif tag_name == consts.Consts.message_flow:
                BpmnDiagramGraphImport.import_message_flow_to_graph(diagram_graph, message_flows_dict, element)
Method that imports information from collaboration element .
19,167
def import_participant_element(diagram_graph, participants_dictionary, participant_element):
    """Add a participant element to the collaboration dictionary.

    A participant without a process reference additionally becomes its own
    node in the diagram graph.
    """
    participant_id = participant_element.getAttribute(consts.Consts.id)
    name = participant_element.getAttribute(consts.Consts.name)
    process_ref = participant_element.getAttribute(consts.Consts.process_ref)
    if process_ref == '':
        diagram_graph.add_node(participant_id)
        diagram_graph.node[participant_id][consts.Consts.type] = consts.Consts.participant
        diagram_graph.node[participant_id][consts.Consts.process] = participant_id
    participants_dictionary[participant_id] = {
        consts.Consts.name: name,
        consts.Consts.process_ref: process_ref,
    }
Adds participant element to the collaboration dictionary .
19,168
def import_process_elements(document, diagram_graph, sequence_flows, process_elements_dict, plane_element):
    """Import all process elements in the diagram.

    Nodes are imported first and sequence flows in a second pass, so that
    every flow can resolve both of its endpoints.

    :param document: parsed XML document
    :param diagram_graph: graph receiving nodes and edges
    :param sequence_flows: dictionary of sequence flows to fill
    :param process_elements_dict: dictionary of process attributes to fill
    :param plane_element: XML BPMNPlane element (for lane layout data)
    """
    for process_element in document.getElementsByTagNameNS("*", consts.Consts.process):
        BpmnDiagramGraphImport.import_process_element(process_elements_dict, process_element)
        process_id = process_element.getAttribute(consts.Consts.id)
        process_attributes = process_elements_dict[process_id]
        lane_set_list = process_element.getElementsByTagNameNS("*", consts.Consts.lane_set)
        if lane_set_list is not None and len(lane_set_list) > 0:
            # Only one laneSet per process is supported.
            lane_set = lane_set_list[0]
            BpmnDiagramGraphImport.import_lane_set_element(process_attributes, lane_set, plane_element)
        # First pass: import all flow nodes of this process.
        for element in utils.BpmnImportUtils.iterate_elements(process_element):
            if element.nodeType != element.TEXT_NODE:
                tag_name = utils.BpmnImportUtils.remove_namespace_from_tag_name(element.tagName)
                BpmnDiagramGraphImport.__import_element_by_tag_name(diagram_graph, sequence_flows, process_id, process_attributes, element, tag_name)
        # Second pass: import sequence flows, now that nodes exist.
        for flow in utils.BpmnImportUtils.iterate_elements(process_element):
            if flow.nodeType != flow.TEXT_NODE:
                tag_name = utils.BpmnImportUtils.remove_namespace_from_tag_name(flow.tagName)
                if tag_name == consts.Consts.sequence_flow:
                    BpmnDiagramGraphImport.import_sequence_flow_to_graph(diagram_graph, sequence_flows, process_id, flow)
Method for importing all process elements in diagram .
19,169
def import_child_lane_set_element(child_lane_set_element, plane_element):
    """Import a childLaneSet element from the diagram file.

    :param child_lane_set_element: XML 'childLaneSet' element
    :param plane_element: XML BPMNPlane element (for lane layout data)
    :return: dictionary with the lane set id and its imported lanes
    """
    lane_set_id = child_lane_set_element.getAttribute(consts.Consts.id)
    lanes_attr = {}
    for element in utils.BpmnImportUtils.iterate_elements(child_lane_set_element):
        if element.nodeType != element.TEXT_NODE:
            tag_name = utils.BpmnImportUtils.remove_namespace_from_tag_name(element.tagName)
            if tag_name == consts.Consts.lane:
                lane = element
                lane_id = lane.getAttribute(consts.Consts.id)
                lane_attr = BpmnDiagramGraphImport.import_lane_element(lane, plane_element)
                lanes_attr[lane_id] = lane_attr
    child_lane_set_attr = {consts.Consts.id: lane_set_id, consts.Consts.lanes: lanes_attr}
    return child_lane_set_attr
Method for importing childLaneSet element from diagram file .
19,170
def import_task_to_graph(diagram_graph, process_id, process_attributes, task_element):
    """Add to graph the new element that represents a BPMN task.

    Tasks carry only the basic attributes inherited from the Activity type,
    so this method simply delegates to the generic activity import.
    """
    BpmnDiagramGraphImport.import_activity_to_graph(diagram_graph, process_id, process_attributes, task_element)
Adds to graph the new element that represents BPMN task . In our representation tasks have only basic attributes and elements inherited from Activity type so this method only needs to call add_flownode_to_graph .
19,171
def import_data_object_to_graph(diagram_graph, process_id, process_attributes, data_object_element):
    """Add to graph the new element that represents a BPMN data object.

    Data objects inherit the FlowNode attributes; in addition an
    ``isCollection`` attribute is stored on the node, defaulting to
    the string "false" when the XML attribute is absent.
    """
    BpmnDiagramGraphImport.import_flow_node_to_graph(diagram_graph, process_id, process_attributes, data_object_element)
    data_object_id = data_object_element.getAttribute(consts.Consts.id)
    diagram_graph.node[data_object_id][consts.Consts.is_collection] = data_object_element.getAttribute(consts.Consts.is_collection) if data_object_element.hasAttribute(consts.Consts.is_collection) else "false"
Adds to graph the new element that represents BPMN data object . Data object inherits attributes from FlowNode . In addition an attribute isCollection is added to the node .
19,172
def import_parallel_gateway_to_graph(diagram_graph, process_id, process_attributes, element):
    """Add to graph the new element that represents a BPMN parallel gateway.

    Parallel gateways have no additional attributes; this separate method
    exists only to keep the import dispatch readable.
    """
    BpmnDiagramGraphImport.import_gateway_to_graph(diagram_graph, process_id, process_attributes, element)
Adds to graph the new element that represents BPMN parallel gateway . Parallel gateway doesn t have additional attributes . Separate method is used to improve code readability .
19,173
def export_task_info(node_params, output_element):
    """Add Task node attributes to the exported XML element."""
    default_flow = node_params.get(consts.Consts.default)
    if default_flow is not None:
        output_element.set(consts.Consts.default, default_flow)
Adds Task node attributes to exported XML element
19,174
def export_subprocess_info(bpmn_diagram, subprocess_params, output_element):
    """Add Subprocess node attributes to the exported XML element, then
    recursively export the nodes and flows contained in the subprocess.

    :param bpmn_diagram: diagram used to look up the subprocess contents
    :param subprocess_params: dictionary of subprocess node attributes
    :param output_element: XML element receiving the attributes and children
    """
    output_element.set(consts.Consts.triggered_by_event, subprocess_params[consts.Consts.triggered_by_event])
    if consts.Consts.default in subprocess_params and subprocess_params[consts.Consts.default] is not None:
        output_element.set(consts.Consts.default, subprocess_params[consts.Consts.default])
    # Export the subprocess body: its own nodes first, then its flows.
    subprocess_id = subprocess_params[consts.Consts.id]
    nodes = bpmn_diagram.get_nodes_list_by_process_id(subprocess_id)
    for node in nodes:
        node_id = node[0]
        params = node[1]
        BpmnDiagramGraphExport.export_node_data(bpmn_diagram, node_id, params, output_element)
    flows = bpmn_diagram.get_flows_list_by_process_id(subprocess_id)
    for flow in flows:
        params = flow[2]
        BpmnDiagramGraphExport.export_flow_process_data(params, output_element)
Adds Subprocess node attributes to exported XML element
19,175
def export_data_object_info(bpmn_diagram, data_object_params, output_element):
    """Add DataObject node attributes to the exported XML element.

    ``bpmn_diagram`` is unused here but kept for signature parity with the
    other per-type export helpers.
    """
    output_element.set(consts.Consts.is_collection, data_object_params[consts.Consts.is_collection])
Adds DataObject node attributes to exported XML element
19,176
def export_complex_gateway_info(node_params, output_element):
    """Add ComplexGateway node attributes to the exported XML element."""
    output_element.set(consts.Consts.gateway_direction, node_params[consts.Consts.gateway_direction])
    default_flow = node_params.get(consts.Consts.default)
    if default_flow is not None:
        output_element.set(consts.Consts.default, default_flow)
Adds ComplexGateway node attributes to exported XML element
19,177
def export_event_based_gateway_info(node_params, output_element):
    """Add EventBasedGateway node attributes to the exported XML element."""
    # Keep the attribute order stable for deterministic XML output.
    for attribute in (consts.Consts.gateway_direction, consts.Consts.instantiate, consts.Consts.event_gateway_type):
        output_element.set(attribute, node_params[attribute])
Adds EventBasedGateway node attributes to exported XML element
19,178
def export_inclusive_exclusive_gateway_info(node_params, output_element):
    """Add InclusiveGateway or ExclusiveGateway node attributes to the
    exported XML element.
    """
    output_element.set(consts.Consts.gateway_direction, node_params[consts.Consts.gateway_direction])
    default_flow = node_params.get(consts.Consts.default)
    if default_flow is not None:
        output_element.set(consts.Consts.default, default_flow)
Adds InclusiveGateway or ExclusiveGateway node attributes to exported XML element
19,179
def export_parallel_gateway_info(node_params, output_element):
    """Add ParallelGateway node attributes to the exported XML element."""
    output_element.set(consts.Consts.gateway_direction, node_params[consts.Consts.gateway_direction])
Adds parallel gateway node attributes to exported XML element
19,180
def export_start_event_info(node_params, output_element):
    """Add StartEvent attributes to the exported XML element, including one
    sub-element per event definition.
    """
    output_element.set(consts.Consts.parallel_multiple, node_params.get(consts.Consts.parallel_multiple))
    output_element.set(consts.Consts.is_interrupting, node_params.get(consts.Consts.is_interrupting))
    definitions = node_params.get(consts.Consts.event_definitions)
    for definition in definitions:
        definition_id = definition[consts.Consts.id]
        definition_type = definition[consts.Consts.definition_type]
        # The definition type doubles as the child element tag name.
        output_definition = eTree.SubElement(output_element, definition_type)
        if definition_id != "":
            output_definition.set(consts.Consts.id, definition_id)
Adds StartEvent attributes to exported XML element
19,181
def export_throw_event_info(node_params, output_element):
    """Add EndEvent or IntermediateThrowEvent attributes to the exported XML
    element: one sub-element per event definition.
    """
    definitions = node_params[consts.Consts.event_definitions]
    for definition in definitions:
        definition_id = definition[consts.Consts.id]
        definition_type = definition[consts.Consts.definition_type]
        # The definition type doubles as the child element tag name.
        output_definition = eTree.SubElement(output_element, definition_type)
        if definition_id != "":
            output_definition.set(consts.Consts.id, definition_id)
Adds EndEvent or IntermediateThrowingEvent attributes to exported XML element
19,182
def export_boundary_event_info(node_params, output_element):
    """Add BoundaryEvent attributes to the exported XML element, including
    one sub-element per event definition.
    """
    output_element.set(consts.Consts.parallel_multiple, node_params[consts.Consts.parallel_multiple])
    output_element.set(consts.Consts.cancel_activity, node_params[consts.Consts.cancel_activity])
    output_element.set(consts.Consts.attached_to_ref, node_params[consts.Consts.attached_to_ref])
    definitions = node_params[consts.Consts.event_definitions]
    for definition in definitions:
        definition_id = definition[consts.Consts.id]
        definition_type = definition[consts.Consts.definition_type]
        # The definition type doubles as the child element tag name.
        output_definition = eTree.SubElement(output_element, definition_type)
        if definition_id != "":
            output_definition.set(consts.Consts.id, definition_id)
Adds BoundaryEvent attributes (parallelMultiple, cancelActivity, attachedToRef and event definitions) to the exported XML element
19,183
def export_process_element(definitions, process_id, process_attributes_dictionary):
    """Create a process element for the exported BPMN XML file.

    Returns the new 'process' sub-element so callers can append children.
    """
    process = eTree.SubElement(definitions, consts.Consts.process)
    process.set(consts.Consts.id, process_id)
    # Same attribute order as before, for deterministic output.
    for attribute in (consts.Consts.is_closed, consts.Consts.is_executable, consts.Consts.process_type):
        process.set(attribute, process_attributes_dictionary[attribute])
    return process
Creates process element for exported BPMN XML file .
19,184
def export_lane_set(process, lane_set, plane_element):
    """Create a laneSet element for the exported BPMN XML file."""
    lane_set_xml = eTree.SubElement(process, consts.Consts.lane_set)
    for lane_id, lane_attr in lane_set[consts.Consts.lanes].items():
        BpmnDiagramGraphExport.export_lane(lane_set_xml, lane_id, lane_attr, plane_element)
Creates laneSet element for exported BPMN XML file .
19,185
def export_child_lane_set(parent_xml_element, child_lane_set, plane_element):
    """Create a childLaneSet element for the exported BPMN XML file."""
    lane_set_xml = eTree.SubElement(parent_xml_element, consts.Consts.lane_set)
    for lane_id, lane_attr in child_lane_set[consts.Consts.lanes].items():
        BpmnDiagramGraphExport.export_lane(lane_set_xml, lane_id, lane_attr, plane_element)
Creates childLaneSet element for exported BPMN XML file .
19,186
def export_lane(parent_xml_element, lane_id, lane_attr, plane_element):
    """Create a lane element for the exported BPMN XML file, together with
    its BPMNShape DI element on the plane.

    :param parent_xml_element: laneSet element receiving the lane
    :param lane_id: id of the lane
    :param lane_attr: dictionary of lane attributes
    :param plane_element: BPMNPlane element receiving the DI shape
    """
    lane_xml = eTree.SubElement(parent_xml_element, consts.Consts.lane)
    lane_xml.set(consts.Consts.id, lane_id)
    lane_xml.set(consts.Consts.name, lane_attr[consts.Consts.name])
    # Nested lane sets are exported recursively.
    if consts.Consts.child_lane_set in lane_attr and len(lane_attr[consts.Consts.child_lane_set]):
        child_lane_set = lane_attr[consts.Consts.child_lane_set]
        BpmnDiagramGraphExport.export_child_lane_set(lane_xml, child_lane_set, plane_element)
    if consts.Consts.flow_node_refs in lane_attr and len(lane_attr[consts.Consts.flow_node_refs]):
        for flow_node_ref_id in lane_attr[consts.Consts.flow_node_refs]:
            flow_node_ref_xml = eTree.SubElement(lane_xml, consts.Consts.flow_node_ref)
            flow_node_ref_xml.text = flow_node_ref_id
    # Diagram-interchange shape for the lane ("_gui" suffix marks DI ids).
    output_element_di = eTree.SubElement(plane_element, BpmnDiagramGraphExport.bpmndi_namespace + consts.Consts.bpmn_shape)
    output_element_di.set(consts.Consts.id, lane_id + "_gui")
    output_element_di.set(consts.Consts.bpmn_element, lane_id)
    output_element_di.set(consts.Consts.is_horizontal, lane_attr[consts.Consts.is_horizontal])
    bounds = eTree.SubElement(output_element_di, "omgdc:Bounds")
    bounds.set(consts.Consts.width, lane_attr[consts.Consts.width])
    bounds.set(consts.Consts.height, lane_attr[consts.Consts.height])
    bounds.set(consts.Consts.x, lane_attr[consts.Consts.x])
    bounds.set(consts.Consts.y, lane_attr[consts.Consts.y])
Creates lane element for exported BPMN XML file .
19,187
def export_node_di_data(node_id, params, plane):
    """Create a new BPMNShape XML element for the given node parameters and
    add it to the plane element.
    """
    output_element_di = eTree.SubElement(plane, BpmnDiagramGraphExport.bpmndi_namespace + consts.Consts.bpmn_shape)
    # "_gui" suffix distinguishes the DI id from the semantic element id.
    output_element_di.set(consts.Consts.id, node_id + "_gui")
    output_element_di.set(consts.Consts.bpmn_element, node_id)
    bounds = eTree.SubElement(output_element_di, "omgdc:Bounds")
    bounds.set(consts.Consts.width, params[consts.Consts.width])
    bounds.set(consts.Consts.height, params[consts.Consts.height])
    bounds.set(consts.Consts.x, params[consts.Consts.x])
    bounds.set(consts.Consts.y, params[consts.Consts.y])
    # Subprocess shapes additionally carry the isExpanded flag.
    if params[consts.Consts.type] == consts.Consts.subprocess:
        output_element_di.set(consts.Consts.is_expanded, params[consts.Consts.is_expanded])
Creates a new BPMNShape XML element for given node parameters and adds it to plane element .
19,188
def export_flow_process_data(params, process):
    """Create a new SequenceFlow XML element for the given edge parameters
    and add it to the process element.

    :param params: dictionary of sequence flow attributes
    :param process: XML 'process' element receiving the flow
    """
    output_flow = eTree.SubElement(process, consts.Consts.sequence_flow)
    output_flow.set(consts.Consts.id, params[consts.Consts.id])
    output_flow.set(consts.Consts.name, params[consts.Consts.name])
    output_flow.set(consts.Consts.source_ref, params[consts.Consts.source_ref])
    output_flow.set(consts.Consts.target_ref, params[consts.Consts.target_ref])
    if consts.Consts.condition_expression in params:
        condition_expression_params = params[consts.Consts.condition_expression]
        condition_expression = eTree.SubElement(output_flow, consts.Consts.condition_expression)
        # Fixed: this id was previously set twice with identical arguments;
        # the redundant duplicate call has been removed (same output).
        condition_expression.set(consts.Consts.id, condition_expression_params[consts.Consts.id])
        condition_expression.text = condition_expression_params[consts.Consts.condition_expression]
        # Mirror the condition text in the flow's name attribute
        # (deliberately overrides the plain name set above).
        output_flow.set(consts.Consts.name, condition_expression_params[consts.Consts.condition_expression])
Creates a new SequenceFlow XML element for given edge parameters and adds it to process element .
19,189
def export_flow_di_data(params, plane):
    """Create a new BPMNEdge XML element for the given edge parameters and
    add it to the plane element, with one waypoint child per (x, y) pair.
    """
    output_flow = eTree.SubElement(plane, BpmnDiagramGraphExport.bpmndi_namespace + consts.Consts.bpmn_edge)
    # "_gui" suffix distinguishes the DI id from the semantic flow id.
    output_flow.set(consts.Consts.id, params[consts.Consts.id] + "_gui")
    output_flow.set(consts.Consts.bpmn_element, params[consts.Consts.id])
    waypoints = params[consts.Consts.waypoints]
    for waypoint in waypoints:
        waypoint_element = eTree.SubElement(output_flow, "omgdi:waypoint")
        waypoint_element.set(consts.Consts.x, waypoint[0])
        waypoint_element.set(consts.Consts.y, waypoint[1])
Creates a new BPMNEdge XML element for given edge parameters and adds it to plane element .
19,190
def indent(elem, level=0):
    """Helper function that adds indentation to XML output (pretty-print).

    Recursively rewrites the whitespace-only ``text``/``tail`` of *elem*
    and its children so that the serialized tree is indented one space per
    nesting level. Returns the element for convenience.
    """
    # i: indentation for this level's children; j: for this element's tail.
    i = "\n" + level * " "
    j = "\n" + (level - 1) * " "
    if len(elem):
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        for subelem in elem:
            BpmnDiagramGraphExport.indent(subelem, level + 1)
        # Dedent after the last child so the closing tag lines up.
        if not elem.tail or not elem.tail.strip():
            elem.tail = j
    else:
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = j
    return elem
Helper function adds indentation to XML output .
19,191
def generate_nodes_clasification(bpmn_diagram):
    """Diagram elements classification.

    Implementation based on the article "A Simple Algorithm for Automatic
    Layout of BPMN Processes". Labels assigned per node:
    - "Element": every node of the process (i.e. everything but edges),
    - "Start Event" / "End Event": all start/end event types,
    - "Join": added by split_join_classification for >1 incoming edge,
    - "Split": added by split_join_classification for >1 outgoing edge.

    :param bpmn_diagram: diagram whose nodes are classified
    :return: dictionary mapping node id to its list of labels
    """
    nodes_classification = {}
    classification_element = "Element"
    classification_start_event = "Start Event"
    classification_end_event = "End Event"
    # (node type, extra labels) pairs; previously this was eleven nearly
    # identical copy-pasted loops. Each node appears under exactly one type,
    # so processing order does not affect the resulting dictionary.
    node_type_labels = (
        (consts.Consts.task, []),
        (consts.Consts.subprocess, []),
        (consts.Consts.complex_gateway, []),
        (consts.Consts.event_based_gateway, []),
        (consts.Consts.inclusive_gateway, []),
        (consts.Consts.exclusive_gateway, []),
        (consts.Consts.parallel_gateway, []),
        (consts.Consts.start_event, [classification_start_event]),
        (consts.Consts.intermediate_catch_event, []),
        (consts.Consts.end_event, [classification_end_event]),
        (consts.Consts.intermediate_throw_event, []),
    )
    for node_type, extra_labels in node_type_labels:
        for element in bpmn_diagram.get_nodes(node_type):
            classification_labels = [classification_element] + extra_labels
            BpmnImportUtils.split_join_classification(element, classification_labels, nodes_classification)
    return nodes_classification
Diagram elements classification . Implementation based on article A Simple Algorithm for Automatic Layout of BPMN Processes . Assigns a classification to the diagram element according to specific element parameters . - Element - every element of the process which is not an edge - Start Event - all types of start events - End Event - all types of end events - Join - an element with more than one incoming edge - Split - an element with more than one outgoing edge .
19,192
def split_join_classification(element, classification_labels, nodes_classification):
    """Append the Split/Join classification when the element qualifies,
    then record its labels in ``nodes_classification``.
    """
    node_id, node_attributes = element
    if len(node_attributes[consts.Consts.incoming_flow]) >= 2:
        classification_labels.append("Join")
    if len(node_attributes[consts.Consts.outgoing_flow]) >= 2:
        classification_labels.append("Split")
    nodes_classification[node_id] = classification_labels
Adds the Split/Join classification if the element qualifies for it.
19,193
def get_all_gateways(bpmn_graph):
    """Return all gateway nodes of the diagram (lazy ``filter`` object,
    preserving the original return type).
    """
    def _is_gateway(node):
        return node[1]['type'] in GATEWAY_TYPES

    return filter(_is_gateway, bpmn_graph.get_nodes())
Returns a list with all gateways in diagram
19,194
def all_control_flow_elements_count(bpmn_graph):
    """Return the total count of all control flow elements (gateways plus
    events) in the BPMNDiagramGraph instance.
    """
    combined_counts = dict(get_gateway_counts(bpmn_graph))
    combined_counts.update(get_events_counts(bpmn_graph))
    return sum(combined_counts.values())
Returns the total count of all control flow elements in the BPMNDiagramGraph instance .
19,195
def export_process_to_csv(bpmn_diagram, directory, filename):
    """Root method of CSV export functionality.

    Walks the diagram from its single start event and writes one CSV row
    per exported element.

    :param bpmn_diagram: diagram to export
    :param directory: output directory (expected to end with a separator,
        since it is concatenated with ``filename``)
    :param filename: output file name
    :raises bpmn_exception.BpmnPythonError: if the diagram does not have
        exactly one start event
    """
    nodes = copy.deepcopy(bpmn_diagram.get_nodes())
    start_nodes = []
    export_elements = []
    # Start events are the nodes with no incoming flow.
    for node in nodes:
        incoming_list = node[1].get(consts.Consts.incoming_flow)
        if len(incoming_list) == 0:
            start_nodes.append(node)
    if len(start_nodes) != 1:
        raise bpmn_exception.BpmnPythonError("Exporting to CSV format accepts only one start event")
    nodes_classification = utils.BpmnImportUtils.generate_nodes_clasification(bpmn_diagram)
    start_node = start_nodes.pop()
    BpmnDiagramGraphCsvExport.export_node(bpmn_diagram, export_elements, start_node, nodes_classification)
    try:
        os.makedirs(directory)
    except OSError as exception:
        # Pre-existing directory is fine; re-raise anything else.
        if exception.errno != errno.EEXIST:
            raise
    # Fixed: use a context manager so the file handle is closed even if a
    # write raises (the original leaked the handle on error).
    with open(directory + filename, "w") as file_object:
        file_object.write("Order,Activity,Condition,Who,Subprocess,Terminated\n")
        BpmnDiagramGraphCsvExport.write_export_node_to_file(file_object, export_elements)
Root method of CSV export functionality .
19,196
def export_node(bpmn_graph, export_elements, node, nodes_classification, order=0, prefix="", condition="", who="", add_join=False):
    """General method for node exporting: dispatches on the node type to
    the start-event, end-event or generic element exporter.

    :param bpmn_graph: diagram being exported
    :param export_elements: list of CSV row dictionaries, appended in place
    :param node: (id, attributes) tuple of the node to export
    :param nodes_classification: classification labels per node id
    :param order: order number for the CSV "Order" column
    :param prefix: order prefix for nested branches
    :param condition: condition text for the CSV "Condition" column
    :param who: performer for the CSV "Who" column
    :param add_join: whether a join marker should be added downstream
    """
    node_type = node[1][consts.Consts.type]
    if node_type == consts.Consts.start_event:
        return BpmnDiagramGraphCsvExport.export_start_event(bpmn_graph, export_elements, node, nodes_classification, order=order, prefix=prefix, condition=condition, who=who)
    elif node_type == consts.Consts.end_event:
        return BpmnDiagramGraphCsvExport.export_end_event(export_elements, node, order=order, prefix=prefix, condition=condition, who=who)
    else:
        return BpmnDiagramGraphCsvExport.export_element(bpmn_graph, export_elements, node, nodes_classification, order=order, prefix=prefix, condition=condition, who=who, add_join=add_join)
General method for node exporting
19,197
def export_start_event(bpmn_graph, export_elements, node, nodes_classification, order=0, prefix="", condition="",
                       who=""):
    """
    Start event export.

    Appends a row for the start event (prefixing the activity name with
    "message " or "timer " when the event carries such a definition) and then
    continues the export from the event's single outgoing node.

    :param bpmn_graph: BpmnDiagramGraph instance representing a BPMN process diagram,
    :param export_elements: list object, accumulates the rows to be exported,
    :param node: (id, attributes) tuple of the start event node,
    :param nodes_classification: dict of node classifications for the diagram,
    :param order: numeric position of the row within the current prefix,
    :param prefix: string prepended to the order number,
    :param condition: condition label for this row,
    :param who: actor label for this row.
    :return: result of exporting the successor node.
    """
    event_definitions = node[1].get(consts.Consts.event_definitions)
    if event_definitions is not None and len(event_definitions) > 0:
        event_definition = node[1][consts.Consts.event_definitions][0]
    else:
        event_definition = None

    # Qualify the activity name by the event-definition type, when present.
    if event_definition is None:
        activity = node[1][consts.Consts.node_name]
    elif event_definition[consts.Consts.definition_type] == "messageEventDefinition":
        activity = "message " + node[1][consts.Consts.node_name]
    elif event_definition[consts.Consts.definition_type] == "timerEventDefinition":
        activity = "timer " + node[1][consts.Consts.node_name]
    else:
        activity = node[1][consts.Consts.node_name]

    export_elements.append({"Order": prefix + str(order), "Activity": activity, "Condition": condition,
                            "Who": who, "Subprocess": "", "Terminated": ""})

    # A start event has exactly one outgoing flow; follow it to the successor.
    outgoing_flow_id = node[1][consts.Consts.outgoing_flow][0]
    outgoing_flow = bpmn_graph.get_flow_by_id(outgoing_flow_id)
    outgoing_node = bpmn_graph.get_node_by_id(outgoing_flow[2][consts.Consts.target_ref])
    # BUGFIX: 'who' was previously passed positionally and landed in the
    # 'condition' parameter of export_node, mislabeling the next row and
    # dropping the actor. Pass it by keyword so it keeps its meaning.
    return BpmnDiagramGraphCsvExport.export_node(bpmn_graph, export_elements, outgoing_node, nodes_classification,
                                                 order + 1, prefix, who=who)
Start event export
19,198
def export_end_event(export_elements, node, order=0, prefix="", condition="", who=""):
    """
    End event export.

    Appends the terminating row for the process branch ("Terminated" set to
    "yes"). The activity name gets a "message " prefix when the event carries
    a message event definition.

    :param export_elements: list object, accumulates the rows to be exported,
    :param node: (id, attributes) tuple of the end event node,
    :param order: numeric position of the row within the current prefix,
    :param prefix: string prepended to the order number,
    :param condition: condition label for this row,
    :param who: actor label for this row.
    :return: None - an end event has no successor to visit.
    """
    definitions = node[1].get(consts.Consts.event_definitions)
    definition = definitions[0] if definitions else None

    name = node[1][consts.Consts.node_name]
    if definition is not None and definition[consts.Consts.definition_type] == "messageEventDefinition":
        activity = "message " + name
    else:
        activity = name

    export_elements.append({"Order": prefix + str(order), "Activity": activity, "Condition": condition,
                            "Who": who, "Subprocess": "", "Terminated": "yes"})
    return None
End event export
19,199
def write_export_node_to_file(file_object, export_elements):
    """
    Write the collected export rows to an already-open CSV file.

    Each element contributes one comma-separated line with its six column
    values in the fixed header order.

    :param file_object: writable file-like object (header already written),
    :param export_elements: list of dicts keyed by the six CSV column names.
    """
    columns = ("Order", "Activity", "Condition", "Who", "Subprocess", "Terminated")
    for row in export_elements:
        file_object.write(",".join(row[column] for column in columns) + "\n")
Writes the collected export rows of the process to the CSV file.