idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
239,100
def main ( self ) : for m in self . methods : if m . name in [ 'Main' , 'main' ] : return m if len ( self . methods ) : return self . methods [ 0 ] return None
Return the default method in this module .
47
8
239,101
def orderered_methods ( self ) : oms = [ ] self . methods . reverse ( ) if self . main : oms = [ self . main ] for m in self . methods : if m == self . main : continue oms . append ( m ) return oms
An ordered list of methods
61
5
239,102
def write_methods ( self ) : b_array = bytearray ( ) for key , vm_token in self . all_vm_tokens . items ( ) : b_array . append ( vm_token . out_op ) if vm_token . data is not None and vm_token . vm_op != VMOp . NOP : b_array = b_array + vm_token . data # self.to_s() return b_array
Write all methods in the current module to a byte string .
103
12
239,103
def link_methods ( self ) : from . . compiler import Compiler for method in self . methods : method . prepare ( ) self . all_vm_tokens = OrderedDict ( ) address = 0 for method in self . orderered_methods : if not method . is_interop : # print("ADDING METHOD %s " % method.full_name) method . address = address for key , vmtoken in method . vm_tokens . items ( ) : self . all_vm_tokens [ address ] = vmtoken address += 1 if vmtoken . data is not None and vmtoken . vm_op != VMOp . NOP : address += len ( vmtoken . data ) vmtoken . addr = vmtoken . addr + method . address for key , vmtoken in self . all_vm_tokens . items ( ) : if vmtoken . src_method is not None : target_method = self . method_by_name ( vmtoken . target_method ) if target_method : jump_len = target_method . address - vmtoken . addr param_ret_counts = bytearray ( ) if Compiler . instance ( ) . nep8 : param_ret_counts = vmtoken . data [ 0 : 2 ] jump_len -= 2 if jump_len > - 32767 and jump_len < 32767 : vmtoken . data = param_ret_counts + jump_len . to_bytes ( 2 , 'little' , signed = True ) else : vmtoken . data = param_ret_counts + jump_len . to_bytes ( 4 , 'little' , signed = True ) else : raise Exception ( "Target method %s not found" % vmtoken . target_method )
Perform linkage of addresses between methods .
399
8
239,104
def export_debug ( self , output_path ) : file_hash = hashlib . md5 ( open ( output_path , 'rb' ) . read ( ) ) . hexdigest ( ) avm_name = os . path . splitext ( os . path . basename ( output_path ) ) [ 0 ] json_data = self . generate_debug_json ( avm_name , file_hash ) mapfilename = output_path . replace ( '.avm' , '.debug.json' ) with open ( mapfilename , 'w+' ) as out_file : out_file . write ( json_data )
this method is used to generate a debug map for NEO debugger
138
12
239,105
def load_and_save ( path , output_path = None , use_nep8 = True ) : compiler = Compiler . load ( os . path . abspath ( path ) , use_nep8 = use_nep8 ) data = compiler . write ( ) if output_path is None : fullpath = os . path . realpath ( path ) path , filename = os . path . split ( fullpath ) newfilename = filename . replace ( '.py' , '.avm' ) output_path = '%s/%s' % ( path , newfilename ) Compiler . write_file ( data , output_path ) compiler . entry_module . export_debug ( output_path ) return data
Call load_and_save to load a Python file to be compiled to the . avm format and save the result . By default the resultant . avm file is saved along side the source file .
155
41
239,106
def load ( path , use_nep8 = True ) : Compiler . __instance = None compiler = Compiler . instance ( ) compiler . nep8 = use_nep8 compiler . entry_module = Module ( path ) return compiler
Call load to load a Python file to be compiled but not to write to . avm
51
18
239,107
def add_record_references ( self , app_id , record_id , field_id , target_record_ids ) : self . _swimlane . request ( 'post' , 'app/{0}/record/{1}/add-references' . format ( app_id , record_id ) , json = { 'fieldId' : field_id , 'targetRecordIds' : target_record_ids } )
Bulk operation to directly add record references without making any additional requests
98
13
239,108
def add_comment ( self , app_id , record_id , field_id , message ) : self . _swimlane . request ( 'post' , 'app/{0}/record/{1}/{2}/comment' . format ( app_id , record_id , field_id ) , json = { 'message' : message , 'createdDate' : pendulum . now ( ) . to_rfc3339_string ( ) } )
Directly add a comment to a record without retrieving the app or record first
103
15
239,109
def _evaluate ( self ) : retrieved_records = SortedDict ( ) for record_id , record in six . iteritems ( self . _elements ) : if record is self . _field . _unset : # Record has not yet been retrieved, get it try : record = self . target_app . records . get ( id = record_id ) except SwimlaneHTTP400Error : # Record appears to be orphaned, don't include in set of elements logger . debug ( "Received 400 response retrieving record '{}', ignoring assumed orphaned record" ) continue retrieved_records [ record_id ] = record self . _elements = retrieved_records return self . _elements . values ( )
Scan for orphaned records and retrieve any records that have not already been grabbed
155
15
239,110
def add ( self , record ) : self . _field . validate_value ( record ) self . _elements [ record . id ] = record self . _sync_field ( )
Add a reference to the provided record
39
7
239,111
def remove ( self , record ) : self . _field . validate_value ( record ) del self . _elements [ record . id ] self . _sync_field ( )
Remove a reference to the provided record
38
7
239,112
def target_app ( self ) : if self . __target_app is None : self . __target_app = self . _swimlane . apps . get ( id = self . __target_app_id ) return self . __target_app
Defer target app retrieval until requested
54
7
239,113
def validate_value ( self , value ) : if value not in ( None , self . _unset ) : super ( ReferenceField , self ) . validate_value ( value ) if value . app != self . target_app : raise ValidationError ( self . record , "Reference field '{}' has target app '{}', cannot reference record '{}' from app '{}'" . format ( self . name , self . target_app , value , value . app ) )
Validate provided record is a part of the appropriate target app for the field
105
15
239,114
def set_swimlane ( self , value ) : # Move single record into list to be handled the same by cursor class if not self . multiselect : if value and not isinstance ( value , list ) : value = [ value ] # Values come in as a list of record ids or None value = value or [ ] records = SortedDict ( ) for record_id in value : records [ record_id ] = self . _unset return super ( ReferenceField , self ) . set_swimlane ( records )
Store record ids in separate location for later use but ignore initial value
114
14
239,115
def set_python ( self , value ) : if not self . multiselect : if value and not isinstance ( value , list ) : value = [ value ] value = value or [ ] records = SortedDict ( ) for record in value : self . validate_value ( record ) records [ record . id ] = record return_value = self . _set ( records ) self . record . _raw [ 'values' ] [ self . id ] = self . get_swimlane ( ) return return_value
Expect list of record instances convert to a SortedDict for internal representation
111
16
239,116
def get_swimlane ( self ) : value = super ( ReferenceField , self ) . get_swimlane ( ) if value : ids = list ( value . keys ( ) ) if self . multiselect : return ids return ids [ 0 ] return None
Return list of record ids
59
6
239,117
def get_python ( self ) : cursor = super ( ReferenceField , self ) . get_python ( ) if self . multiselect : return cursor else : try : return cursor [ 0 ] except IndexError : return None
Return cursor if multi - select direct value if single - select
47
12
239,118
def get ( self , key , value ) : if key == 'id' : # Server returns 204 instead of 404 for a non-existent app id response = self . _swimlane . request ( 'get' , 'app/{}' . format ( value ) ) if response . status_code == 204 : raise ValueError ( 'No app with id "{}"' . format ( value ) ) return App ( self . _swimlane , response . json ( ) ) else : # Workaround for lack of support for get by name # Holdover from previous driver support, to be fixed as part of 3.x for app in self . list ( ) : if value and value == app . name : return app # No matching app found raise ValueError ( 'No app with name "{}"' . format ( value ) )
Get single app by one of id or name
174
9
239,119
def list ( self ) : response = self . _swimlane . request ( 'get' , 'app' ) return [ App ( self . _swimlane , item ) for item in response . json ( ) ]
Retrieve list of all apps
47
6
239,120
def users ( self ) : if self . __users is None : self . __users = GroupUsersCursor ( swimlane = self . _swimlane , user_ids = self . __user_ids ) return self . __users
Returns a GroupUsersCursor with list of User instances for this Group
50
14
239,121
def _evaluate ( self ) : if self . _elements : for element in self . _elements : yield element else : for user_id in self . __user_ids : element = self . _swimlane . users . get ( id = user_id ) self . _elements . append ( element ) yield element
Lazily retrieve and build User instances from returned data
70
11
239,122
def _user_raw_from_login_content ( login_content ) : matching_keys = [ 'displayName' , 'lastLogin' , 'active' , 'name' , 'isMe' , 'lastPasswordChangedDate' , 'passwordResetRequired' , 'groups' , 'roles' , 'email' , 'isAdmin' , 'createdDate' , 'modifiedDate' , 'createdByUser' , 'modifiedByUser' , 'userName' , 'id' , 'disabled' ] raw_data = { '$type' : User . _type , } for key in matching_keys : if key in login_content : raw_data [ key ] = login_content [ key ] return raw_data
Returns a User instance with appropriate raw data parsed from login response content
159
13
239,123
def __verify_server_version ( self ) : if compare_versions ( '.' . join ( [ _lib_major_version , _lib_minor_version ] ) , self . product_version ) > 0 : logger . warning ( 'Client version {} connecting to server with newer minor release {}.' . format ( _lib_full_version , self . product_version ) ) if compare_versions ( _lib_major_version , self . product_version ) != 0 : raise InvalidSwimlaneProductVersion ( self , '{}.0' . format ( _lib_major_version ) , '{}.0' . format ( str ( int ( _lib_major_version ) + 1 ) ) )
Verify connected to supported server product version
154
8
239,124
def settings ( self ) : if not self . __settings : self . __settings = self . request ( 'get' , 'settings' ) . json ( ) return self . __settings
Retrieve and cache settings from server
39
7
239,125
def product_version ( self ) : version_separator = '+' if version_separator in self . version : # Post product/build version separation return self . version . split ( version_separator ) [ 0 ] # Pre product/build version separation return self . version . split ( '-' ) [ 0 ]
Swimlane product version
68
5
239,126
def build_number ( self ) : version_separator = '+' if version_separator in self . version : # Post product/build version separation return self . version . split ( version_separator ) [ 2 ] # Pre product/build version separation return self . version . split ( '-' ) [ 1 ]
Swimlane build number
68
5
239,127
def authenticate ( self ) : # Temporarily remove auth from Swimlane session for auth request to avoid recursive loop during login request self . _swimlane . _session . auth = None resp = self . _swimlane . request ( 'post' , 'user/login' , json = { 'userName' : self . _username , 'password' : self . _password } , ) self . _swimlane . _session . auth = self # Get JWT from response content json_content = resp . json ( ) token = json_content . pop ( 'token' , None ) # Grab token expiration token_data = jwt . decode ( token , verify = False ) token_expiration = pendulum . from_timestamp ( token_data [ 'exp' ] ) headers = { 'Authorization' : 'Bearer {}' . format ( token ) } # Create User instance for authenticating user from login response data user = User ( self . _swimlane , _user_raw_from_login_content ( json_content ) ) self . _login_headers = headers self . user = user self . _token_expiration = token_expiration
Send login request and update User instance login headers and token expiration
252
12
239,128
def _evaluate ( self ) : if self . _elements : for element in self . _elements : yield element else : for page in itertools . count ( ) : raw_elements = self . _retrieve_raw_elements ( page ) for raw_element in raw_elements : element = self . _parse_raw_element ( raw_element ) self . _elements . append ( element ) yield element if self . __limit and len ( self . _elements ) >= self . __limit : break if any ( [ len ( raw_elements ) < self . page_size , ( self . __limit and len ( self . _elements ) >= self . __limit ) ] ) : break
Lazily retrieve and paginate report results and build Record instances from returned data
156
16
239,129
def _validate_user ( self , user ) : # All users allowed if self . _show_all_users : return # User specifically allowed if user . id in self . _allowed_user_ids : return # User allowed by group membership user_member_group_ids = set ( [ g [ 'id' ] for g in user . _raw [ 'groups' ] ] ) if user_member_group_ids & self . _allowed_member_ids : return raise ValidationError ( self . record , 'User `{}` is not a valid selection for field `{}`' . format ( user , self . name ) )
Validate a User instance against allowed user IDs or membership in a group
139
14
239,130
def _validate_group ( self , group ) : # All groups allowed if self . _show_all_groups : return # Group specifically allowed if group . id in self . _allowed_group_ids : return # Group allowed by subgroup membership for parent_group_id in self . _allowed_subgroup_ids : # Get each group, and check subgroup ids parent_group = self . _swimlane . groups . get ( id = parent_group_id ) parent_group_child_ids = set ( [ g [ 'id' ] for g in parent_group . _raw [ 'groups' ] ] ) if group . id in parent_group_child_ids : return raise ValidationError ( self . record , 'Group `{}` is not a valid selection for field `{}`' . format ( group , self . name ) )
Validate a Group instance against allowed group IDs or subgroup of a parent group
188
16
239,131
def cast_to_python ( self , value ) : # v2.x does not provide a distinction between users and groups at the field selection level, can only return # UserGroup instances instead of specific User or Group instances if value is not None : value = UserGroup ( self . _swimlane , value ) return value
Convert JSON definition to UserGroup object
68
8
239,132
def cursor ( self ) : if self . _cursor is None : # pylint: disable=not-callable self . _cursor = self . cursor_class ( self , self . get_initial_elements ( ) ) return self . _cursor
Cache and return cursor_class instance
57
7
239,133
def comment ( self , message ) : message = str ( message ) sw_repr = { '$type' : 'Core.Models.Record.Comments, Core' , 'createdByUser' : self . _record . _swimlane . user . as_usergroup_selection ( ) , 'createdDate' : pendulum . now ( ) . to_rfc3339_string ( ) , 'message' : message } comment = Comment ( self . _swimlane , sw_repr ) self . _elements . append ( comment ) self . _record . _raw [ 'comments' ] . setdefault ( self . _field . id , [ ] ) self . _record . _raw [ 'comments' ] [ self . _field . id ] . append ( comment . _raw ) return comment
Add new comment to record comment field
177
7
239,134
def get_recursive_subclasses ( cls ) : return cls . __subclasses__ ( ) + [ g for s in cls . __subclasses__ ( ) for g in get_recursive_subclasses ( s ) ]
Return list of all subclasses for a class including subclasses of direct subclasses
52
16
239,135
def import_submodules ( package ) : if isinstance ( package , str ) : package = importlib . import_module ( package ) results = { } for _ , full_name , is_pkg in pkgutil . walk_packages ( package . __path__ , package . __name__ + '.' ) : results [ full_name ] = importlib . import_module ( full_name ) if is_pkg : results . update ( import_submodules ( full_name ) ) return results
Return list of imported module instances from beneath root_package
107
11
239,136
def one_of_keyword_only ( * valid_keywords ) : def decorator ( func ) : @ functools . wraps ( func ) def wrapper ( * args , * * kwargs ) : sentinel = object ( ) values = { } for key in valid_keywords : kwarg_value = kwargs . pop ( key , sentinel ) if kwarg_value is not sentinel : values [ key ] = kwarg_value if kwargs : raise TypeError ( 'Unexpected arguments: {}' . format ( kwargs ) ) if not values : raise TypeError ( 'Must provide one of {} as keyword argument' . format ( ', ' . join ( valid_keywords ) ) ) if len ( values ) > 1 : raise TypeError ( 'Must provide only one of {} as keyword argument. Received {}' . format ( ', ' . join ( valid_keywords ) , values ) ) return func ( * ( args + values . popitem ( ) ) ) return wrapper return decorator
Decorator to help make one - and - only - one keyword - only argument functions more reusable
222
20
239,137
def get_python ( self ) : value = super ( DatetimeField , self ) . get_python ( ) if value is not None : # Handle subtypes with matching Pendulum types if self . input_type == self . _type_time : value = value . time ( ) if self . input_type == self . _type_date : value = value . date ( ) return value
Coerce to best date type representation for the field subtype
83
13
239,138
def cast_to_swimlane ( self , value ) : if value is None : return value if self . input_type == self . _type_interval : return value . in_seconds ( ) * 1000 return self . format_datetime ( value )
Return datetimes formatted as expected by API and timespans as millisecond epochs
56
17
239,139
def for_json ( self ) : value = super ( DatetimeField , self ) . for_json ( ) # Order of instance checks matters for proper inheritance checks if isinstance ( value , pendulum . Interval ) : return value . in_seconds ( ) * 1000 if isinstance ( value , datetime ) : return self . format_datetime ( value ) if isinstance ( value , pendulum . Time ) : return str ( value ) if isinstance ( value , pendulum . Date ) : return value . to_date_string ( )
Return date ISO8601 string formats for datetime date and time values milliseconds for intervals
116
17
239,140
def report_factory ( app , report_name , * * kwargs ) : # pylint: disable=protected-access created = pendulum . now ( ) . to_rfc3339_string ( ) user_model = app . _swimlane . user . as_usergroup_selection ( ) return Report ( app , { "$type" : Report . _type , "groupBys" : [ ] , "aggregates" : [ ] , "applicationIds" : [ app . id ] , "columns" : [ ] , "sorts" : { "$type" : "System.Collections.Generic.Dictionary`2" "[[System.String, mscorlib]," "[Core.Models.Search.SortTypes, Core]], mscorlib" , } , "filters" : [ ] , "defaultSearchReport" : False , "allowed" : [ ] , "permissions" : { "$type" : "Core.Models.Security.PermissionMatrix, Core" } , "createdDate" : created , "modifiedDate" : created , "createdByUser" : user_model , "modifiedByUser" : user_model , "id" : None , "name" : report_name , "disabled" : False , "keywords" : "" } , * * kwargs )
Report instance factory populating boilerplate raw data
296
9
239,141
def filter ( self , field_name , operand , value ) : if operand not in self . _FILTER_OPERANDS : raise ValueError ( 'Operand must be one of {}' . format ( ', ' . join ( self . _FILTER_OPERANDS ) ) ) # Use temp Record instance for target app to translate values into expected API format record_stub = record_factory ( self . _app ) field = record_stub . get_field ( field_name ) self . _raw [ 'filters' ] . append ( { "fieldId" : field . id , "filterType" : operand , "value" : field . get_report ( value ) } )
Adds a filter to report
153
5
239,142
def list ( self ) : raw_reports = self . _swimlane . request ( 'get' , "reports?appId={}" . format ( self . _app . id ) ) . json ( ) # Ignore StatsReports for now return [ Report ( self . _app , raw_report ) for raw_report in raw_reports if raw_report [ '$type' ] == Report . _type ]
Retrieve all reports for parent app
88
7
239,143
def get ( self , report_id ) : return Report ( self . _app , self . _swimlane . request ( 'get' , "reports/{0}" . format ( report_id ) ) . json ( ) )
Retrieve report by ID
50
5
239,144
def get ( self , key , value ) : if key == 'id' : response = self . _swimlane . request ( 'get' , 'groups/{}' . format ( value ) ) return Group ( self . _swimlane , response . json ( ) ) else : response = self . _swimlane . request ( 'get' , 'groups/lookup?name={}' . format ( value ) ) matched_groups = response . json ( ) for group_data in matched_groups : if group_data . get ( 'name' ) == value : return Group ( self . _swimlane , group_data ) raise ValueError ( 'Unable to find group with name "{}"' . format ( value ) )
Retrieve single group record by id or name
160
9
239,145
def get ( self , arg , value ) : if arg == 'id' : response = self . _swimlane . request ( 'get' , 'user/{}' . format ( value ) ) try : user_data = response . json ( ) except ValueError : raise ValueError ( 'Unable to find user with ID "{}"' . format ( value ) ) return User ( self . _swimlane , user_data ) else : response = self . _swimlane . request ( 'get' , 'user/search?query={}' . format ( quote_plus ( value ) ) ) matched_users = response . json ( ) # Display name not unique, fail if multiple users share the same target display name target_matches = [ ] for user_data in matched_users : user_display_name = user_data . get ( 'displayName' ) if user_display_name == value : target_matches . append ( user_data ) # No matches if not target_matches : raise ValueError ( 'Unable to find user with display name "{}"' . format ( value ) ) # Multiple matches if len ( target_matches ) > 1 : raise ValueError ( 'Multiple users returned with display name "{}". Matching user IDs: {}' . format ( value , ', ' . join ( [ '"{}"' . format ( r [ 'id' ] ) for r in target_matches ] ) ) ) return User ( self . _swimlane , target_matches [ 0 ] )
Retrieve single user record by id or username
331
9
239,146
def _evaluate ( self ) : if not self . __retrieved : self . _elements = self . _retrieve_revisions ( ) self . __retrieved = True return super ( RevisionCursor , self ) . _evaluate ( )
Lazily retrieves caches and returns the list of record _revisions
52
15
239,147
def _retrieve_revisions ( self ) : response = self . _swimlane . request ( 'get' , 'history' , params = { 'type' : 'Records' , 'id' : self . _record . id } ) raw_revisions = response . json ( ) return [ Revision ( self . _record , raw ) for raw in raw_revisions ]
Retrieve and populate Revision instances from history API endpoint
83
10
239,148
def validate_value ( self , value ) : super ( ValuesListField , self ) . validate_value ( value ) if value is not None : if value not in self . selection_to_id_map : raise ValidationError ( self . record , 'Field "{}" invalid value "{}". Valid options: {}' . format ( self . name , value , ', ' . join ( self . selection_to_id_map . keys ( ) ) ) )
Validate provided value is one of the valid options
98
10
239,149
def cast_to_report ( self , value ) : value = super ( ValuesListField , self ) . cast_to_report ( value ) if value : return value [ 'id' ]
Report format uses only the value s id
41
8
239,150
def validate_filters_or_records ( filters_or_records ) : # If filters_or_records is empty, fail if not filters_or_records : raise ValueError ( 'Must provide at least one filter tuples or Records' ) # If filters_or_records is not list of Record or tuple, fail if not isinstance ( filters_or_records [ 0 ] , ( Record , tuple ) ) : raise ValueError ( 'Cannot provide both filter tuples and Records' ) # If filters_or_records is not list of either Record or only tuple, fail _type = type ( filters_or_records [ 0 ] ) for item in filters_or_records : if not isinstance ( item , _type ) : raise ValueError ( "Expected filter tuple or Record, received {0}" . format ( item ) ) return _type
Validation for filters_or_records variable from bulk_modify and bulk_delete
192
19
239,151
def get ( self , key , value ) : if key == 'id' : response = self . _swimlane . request ( 'get' , "app/{0}/record/{1}" . format ( self . _app . id , value ) ) return Record ( self . _app , response . json ( ) ) if key == 'tracking_id' : response = self . _swimlane . request ( 'get' , "app/{0}/record/tracking/{1}" . format ( self . _app . id , value ) ) return Record ( self . _app , response . json ( ) )
Get a single record by id
136
6
239,152
def search ( self , * filters , * * kwargs ) : report = self . _app . reports . build ( 'search-' + random_string ( 8 ) , keywords = kwargs . pop ( 'keywords' , [ ] ) , limit = kwargs . pop ( 'limit' , Report . default_limit ) ) for filter_tuples in filters : report . filter ( * filter_tuples ) return list ( report )
Shortcut to generate a new temporary search report using provided filters and return the resulting records
96
17
239,153
def create ( self , * * fields ) : new_record = record_factory ( self . _app , fields ) new_record . save ( ) return new_record
Create and return a new record in associated app and return the newly created Record instance
37
16
239,154
def bulk_create ( self , * records ) : if not records : raise TypeError ( 'Must provide at least one record' ) if any ( not isinstance ( r , dict ) for r in records ) : raise TypeError ( 'New records must be provided as dicts' ) # Create local records from factory for initial full validation new_records = [ ] for record_data in records : record = record_factory ( self . _app , record_data ) record . validate ( ) new_records . append ( record ) self . _swimlane . request ( 'post' , 'app/{}/record/batch' . format ( self . _app . id ) , json = [ r . _raw for r in new_records ] )
Create and validate multiple records in associated app
164
8
239,155
def _validate_list ( self , target ) : # Check list length restrictions min_items = self . _field . field_definition . get ( 'minItems' ) max_items = self . _field . field_definition . get ( 'maxItems' ) if min_items is not None : if len ( target ) < min_items : raise ValidationError ( self . _record , "Field '{}' must have a minimum of {} item(s)" . format ( self . _field . name , min_items ) ) if max_items is not None : if len ( target ) > max_items : raise ValidationError ( self . _record , "Field '{}' can only have a maximum of {} item(s)" . format ( self . _field . name , max_items ) ) # Individual item validation for item in target : self . _validate_item ( item )
Validate a list against field validation rules
195
8
239,156
def set_swimlane ( self , value ) : value = value or [ ] self . _initial_value_to_ids_map = defaultdict ( list ) for item in value : self . _initial_value_to_ids_map [ item [ 'value' ] ] . append ( item [ 'id' ] ) return super ( ListField , self ) . set_swimlane ( [ d [ 'value' ] for d in value ] )
Convert from list of dicts with values to list of values
99
13
239,157
def set_python ( self , value ) : if not isinstance ( value , ( list , type ( None ) ) ) : raise ValidationError ( self . record , "Field '{}' must be set to a list, not '{}'" . format ( self . name , value . __class__ ) ) value = value or [ ] self . cursor . _validate_list ( value ) return super ( ListField , self ) . set_python ( value )
Validate using cursor for consistency between direct set of values vs modification of cursor values
101
16
239,158
def cast_to_swimlane ( self , value ) : value = super ( ListField , self ) . cast_to_swimlane ( value ) if not value : return None # Copy initial values to pop IDs out as each value is hydrated back to server format, without modifying initial # cache of value -> list(ids) map value_ids = deepcopy ( self . _initial_value_to_ids_map ) return [ self . _build_list_item ( item , value_ids [ item ] . pop ( 0 ) if value_ids [ item ] else None ) for item in value ]
Restore swimlane format attempting to keep initial IDs for any previously existing values
131
15
239,159
def select ( self , element ) : self . _field . validate_value ( element ) self . _elements . add ( element ) self . _sync_field ( )
Add an element to the set of selected elements Proxy to internal set . add and sync field
37
18
239,160
def get_python ( self ) : if self . multiselect : return super ( MultiSelectField , self ) . get_python ( ) return self . _get ( )
Only return cursor instance if configured for multiselect
37
10
239,161
def get_swimlane ( self ) : if self . multiselect : value = self . _get ( ) children = [ ] if value : for child in value : children . append ( self . cast_to_swimlane ( child ) ) return children return None return super ( MultiSelectField , self ) . get_swimlane ( )
Handle multi - select and single - select modes
75
9
239,162
def set_python ( self , value ) : if self . multiselect : value = value or [ ] elements = [ ] for element in value : self . validate_value ( element ) elements . append ( element ) value = elements else : self . validate_value ( value ) self . _set ( value )
Override to remove key from raw data when empty to work with server 2 . 16 + validation
66
18
239,163
def set_swimlane ( self , value ) : if self . multiselect : value = value or [ ] children = [ ] for child in value : children . append ( self . cast_to_python ( child ) ) return self . _set ( children ) return super ( MultiSelectField , self ) . set_swimlane ( value )
Cast all multi - select elements to correct internal type like single - select mode
75
15
239,164
def for_json ( self ) : if self . multiselect : return super ( MultiSelectField , self ) . for_json ( ) value = self . get_python ( ) if hasattr ( value , 'for_json' ) : return value . for_json ( ) return value
Handle multi - select vs single - select
62
8
239,165
def record_factory ( app , fields = None ) : # pylint: disable=line-too-long record = Record ( app , { '$type' : Record . _type , 'isNew' : True , 'applicationId' : app . id , 'comments' : { '$type' : 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib' } , 'values' : { '$type' : 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib' } } ) fields = fields or { } for name , value in six . iteritems ( fields ) : record [ name ] = value # Pop off fields with None value to allow for saving empty fields copy_raw = copy . copy ( record . _raw ) values_dict = { } for key , value in six . iteritems ( copy_raw [ 'values' ] ) : if value is not None : values_dict [ key ] = value record . _raw [ 'values' ] = values_dict return record
Return a temporary Record instance to be used for field validation and value parsing
282
14
239,166
def set_python ( self , value ) : # hook exists to stringify before validation # set to string if not string or unicode if value is not None and not isinstance ( value , self . supported_types ) or isinstance ( value , int ) : value = str ( value ) return super ( TextField , self ) . set_python ( value )
Set field internal value from the python representation of field value
76
11
239,167
def compare_versions ( version_a , version_b , zerofill = False ) : a_sections = list ( ( int ( match ) for match in re . findall ( r'\d+' , version_a ) ) ) b_sections = list ( ( int ( match ) for match in re . findall ( r'\d+' , version_b ) ) ) if zerofill : max_sections = max ( [ len ( a_sections ) , len ( b_sections ) ] ) a_sections += [ 0 for _ in range ( max ( max_sections - len ( a_sections ) , 0 ) ) ] b_sections += [ 0 for _ in range ( max ( max_sections - len ( b_sections ) , 0 ) ) ] else : min_sections = min ( [ len ( a_sections ) , len ( b_sections ) ] ) a_sections = a_sections [ : min_sections ] b_sections = b_sections [ : min_sections ] return ( b_sections > a_sections ) - ( b_sections < a_sections )
Return direction of version relative to provided version sections
239
9
239,168
def requires_swimlane_version(min_version=None, max_version=None):
    """Decorator verifying the Swimlane server build version is within the
    given inclusive [min_version, max_version] range before calling through.

    Raises ValueError at decoration time for a missing or inverted range, and
    InvalidSwimlaneBuildVersion at call time when the server is out of range.
    """
    if min_version is None and max_version is None:
        raise ValueError('Must provide either min_version, max_version, or both')

    if min_version and max_version and compare_versions(min_version, max_version) < 0:
        raise ValueError('min_version must be <= max_version ({}, {})'.format(min_version, max_version))

    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            build = self._swimlane.build_version
            below_min = min_version and compare_versions(min_version, build, True) < 0
            above_max = max_version and compare_versions(build, max_version, True) < 0
            if below_min or above_max:
                raise InvalidSwimlaneBuildVersion(self._swimlane, min_version, max_version)
            return func(self, *args, **kwargs)

        return wrapper

    return decorator
Decorator for SwimlaneResolver methods verifying Swimlane server build version is within a given inclusive range
256
21
239,169
def get_report(self, value):
    """Return the field's Python value formatted for use in a report filter.

    Multiselect fields produce a list (empty when value is falsy); single
    values are cast directly.
    """
    if not self.multiselect:
        return self.cast_to_report(value)
    return [self.cast_to_report(child) for child in (value or [])]
Return provided field Python value formatted for use in report filter
60
11
239,170
def get_bulk_modify(self, value):
    """Return the value formatted for a bulk modify request.

    Multiselect fields produce a list (empty when value is falsy); single
    values are cast directly.
    """
    if not self.multiselect:
        return self.cast_to_bulk_modify(value)
    return [self.cast_to_bulk_modify(item) for item in (value or [])]
Return value in format for bulk modify
63
7
239,171
def validate_value(self, value):
    """Raise ValidationError when the field is readonly or the value's type
    is not one of the field's supported types.

    None and the internal unset sentinel are always accepted.
    """
    if self.readonly:
        raise ValidationError(self.record, "Cannot set readonly field '{}'".format(self.name))
    if value in (None, self._unset):
        return
    if not self.supported_types or isinstance(value, tuple(self.supported_types)):
        return
    type_names = ', '.join([repr(t.__name__) for t in self.supported_types])
    raise ValidationError(
        self.record,
        "Field '{}' expects one of {}, got '{}' instead".format(
            self.name, type_names, type(value).__name__))
Validate value is an acceptable type during set_python operation
150
12
239,172
def _set ( self , value ) : self . _value = value self . record . _raw [ 'values' ] [ self . id ] = self . get_swimlane ( )
Default setter used for both representations unless overridden
41
10
239,173
def resolve_field_class(field_definition):
    """Return the field class registered for the definition's Swimlane $type.

    Args:
        field_definition: Raw Swimlane field definition dict containing a
            '$type' key.

    Raises:
        KeyError: if '$type' is missing or no field class handles it.
    """
    try:
        return _FIELD_TYPE_MAP[field_definition['$type']]
    except KeyError as error:
        # Bug fix: format the offending $type, not the entire definition dict.
        # NOTE(review): assigning .message is a Python 2-era idiom that str()
        # ignores on Python 3 — kept so callers inspecting .message still work.
        error.message = 'No field available to handle Swimlane $type "{}"'.format(
            field_definition.get('$type'))
        raise
Return field class most fitting of provided Swimlane field definition
63
11
239,174
def get_cache_index_key(resource):
    """Return a usable (class, attribute, value) cache lookup key for an
    APIResource instance or an already-built 3-tuple.

    Raises TypeError when the key is malformed or its class is not an
    APIResource subclass.
    """
    if isinstance(resource, APIResource):
        index_items = list(resource.get_cache_index_keys().items())
        attr_name, attr_value = index_items[0]
        key = (type(resource), attr_name, attr_value)
    else:
        key = tuple(resource)

    if len(key) != 3:
        raise TypeError('Cache key must be tuple of (class, key, value), got `{!r}` instead'.format(key))
    if not issubclass(key[0], APIResource):
        raise TypeError('First value of cache key must be a subclass of APIResource, got `{!r}` instead'.format(key[0]))
    return key
Return a usable cache lookup key for an already initialized resource
175
11
239,175
def check_cache(resource_type):
    """Decorator for adapter methods: return a cached resource when the call's
    first keyword argument forms a known cache index key, otherwise fall
    through to the wrapped method."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                adapter = args[0]
                attr, attr_value = list(kwargs.items())[0]
            except IndexError:
                logger.warning("Couldn't generate full index key, skipping cache")
            else:
                index_key = (resource_type, attr, attr_value)
                try:
                    hit = adapter._swimlane.resources_cache[index_key]
                except KeyError:
                    logger.debug('Cache miss: `{!r}`'.format(index_key))
                else:
                    logger.debug('Cache hit: `{!r}`'.format(hit))
                    return hit

            # Fallback to default function call
            return func(*args, **kwargs)

        return wrapper

    return decorator
Decorator for adapter methods to check cache for resource before normally sending requests to retrieve data
201
18
239,176
def cache(self, resource):
    """Insert an APIResource instance into its type's cache.

    Skips silently (with a warning) when the resource cannot describe its own
    cache keys, and does nothing when caching is disabled.
    """
    if not isinstance(resource, APIResource):
        raise TypeError('Cannot cache `{!r}`, can only cache APIResource instances'.format(resource))

    # Disable inserts to cache when disabled
    if self.__cache_max_size == 0:
        return

    try:
        internal_key = resource.get_cache_internal_key()
        index_keys = resource.get_cache_index_keys().items()
    except NotImplementedError:
        logger.warning('Not caching `{!r}`, resource did not provide all necessary cache details'.format(resource))
        return

    resource_type = type(resource)
    for attr, attr_value in index_keys:
        self.__cache_index_key_map[(resource_type, attr, attr_value)] = internal_key
    self.__caches[resource_type][internal_key] = resource
    logger.debug('Cached `{!r}`'.format(resource))
Insert a resource instance into appropriate resource cache
232
8
239,177
def clear(self, *resource_types):
    """Clear the cache for each given APIResource class, or every cached
    class when none are provided."""
    targets = resource_types or tuple(self.__caches.keys())
    for resource_type in targets:
        # Clear before deleting so lingering references see an empty cache
        self.__caches[resource_type].clear()
        del self.__caches[resource_type]
Clear cache for each provided APIResource class or all resources if no classes are provided
73
18
239,178
def _set(self, value):
    """Override setter to also invalidate the attachment cursor.

    Delegates storage to the base field setter, then clears the cached
    cursor so it is rebuilt from the new value on next access.
    """
    super(AttachmentsField, self)._set(value)
    self._cursor = None
Override setter allow clearing cursor
31
6
239,179
def verify_otp(request):
    """Verify a submitted TOTP verification code for the logged-in user.

    On success, flags the session as OTP-verified and redirects to the POSTed
    ``next`` URL (or LOGIN_REDIRECT_URL), refreshing the remember-me cookie.
    Otherwise re-renders the verification form with an error message.
    """
    ctx = {}
    if request.method == "POST":
        verification_code = request.POST.get('verification_code')
        if verification_code is None:
            ctx['error_message'] = "Missing verification code."
        else:
            otp_ = UserOTP.objects.get(user=request.user)
            totp_ = totp.TOTP(otp_.secret_key)
            is_verified = totp_.verify(verification_code)
            if is_verified:
                # NOTE(review): session key 'verfied_otp' looks misspelled —
                # confirm every reader uses this exact spelling before renaming
                request.session['verfied_otp'] = True
                response = redirect(request.POST.get("next", settings.LOGIN_REDIRECT_URL))
                return update_rmb_cookie(request, response)
            ctx['error_message'] = "Your code is expired or invalid."
    ctx['next'] = request.GET.get('next', settings.LOGIN_REDIRECT_URL)
    # NOTE(review): status 400 is also returned for the initial GET render —
    # confirm that is intentional
    return render(request, 'django_mfa/login_verify.html', ctx, status=400)
Verify an OTP request
247
6
239,180
def at(self, for_time, counter_offset=0):
    """Return the OTP for the given time plus an optional counter offset.

    Accepts either a Unix timestamp (anything int()-able) or a datetime;
    timestamps are converted via datetime.fromtimestamp.
    """
    if not isinstance(for_time, datetime.datetime):
        for_time = datetime.datetime.fromtimestamp(int(for_time))
    counter = self.timecode(for_time) + counter_offset
    return self.generate_otp(counter)
Accepts either a Unix timestamp integer or a Time object . Time objects will be adjusted to UTC automatically
75
20
239,181
def verify(self, otp, for_time=None, valid_window=0):
    """Verify *otp* against the OTP for *for_time* (defaults to now),
    optionally accepting codes within +/- valid_window time steps."""
    if for_time is None:
        for_time = datetime.datetime.now()

    if not valid_window:
        return utils.strings_equal(str(otp), str(self.at(for_time)))

    for offset in range(-valid_window, valid_window + 1):
        if utils.strings_equal(str(otp), str(self.at(for_time, offset))):
            return True
    return False
Verifies the OTP passed in against the current time OTP
122
13
239,182
def provisioning_uri(self, name, issuer_name=None):
    """Return the provisioning URI for this OTP.

    The URI can be encoded in a QR code and used to provision authenticator
    apps such as Google Authenticator.
    """
    return utils.build_uri(self.secret, name, issuer_name=issuer_name)
Returns the provisioning URI for the OTP This can then be encoded in a QR Code and used to provision the Google Authenticator app
39
27
239,183
def build_uri(secret, name, initial_count=None, issuer_name=None):
    """Return the otpauth:// provisioning URI for either TOTP or HOTP.

    A non-None initial_count (0 is valid) selects HOTP and appends a counter
    parameter; an issuer is prefixed to the label and added as a query param.
    """
    use_counter = initial_count is not None
    base = 'otpauth://%s/' % ('hotp' if use_counter else 'totp')

    if issuer_name:
        issuer_name = quote(issuer_name)
        base += '%s:' % issuer_name

    uri = '%(base)s%(name)s?secret=%(secret)s' % {
        'name': quote(name, safe='@'),
        'secret': secret,
        'base': base,
    }

    if use_counter:
        uri += '&counter=%s' % initial_count
    if issuer_name:
        uri += '&issuer=%s' % issuer_name
    return uri
Returns the provisioning URI for the OTP ; works for either TOTP or HOTP .
212
20
239,184
def strings_equal(s1, s2):
    """Timing-attack resistant string comparison.

    Both inputs are NFKC-normalized so visually-equivalent Unicode forms
    compare equal, then compared with a constant-time digest comparison.
    """
    try:
        s1 = unicodedata.normalize('NFKC', str(s1))
        s2 = unicodedata.normalize('NFKC', str(s2))
    except UnicodeError:
        # Python 2 fallback: str() raises UnicodeEncodeError on non-ASCII
        # unicode input. Previously a bare `except:` that masked unrelated
        # errors and hit a NameError (`unicode`) on Python 3.
        s1 = unicodedata.normalize('NFKC', unicode(s1))  # noqa: F821
        s2 = unicodedata.normalize('NFKC', unicode(s2))  # noqa: F821
    return compare_digest(s1, s2)
Timing - attack resistant string comparison .
114
8
239,185
def GetPythonLibraryDirectoryPath():
    """Retrieves the Python library directory path relative to the prefix."""
    library_path = sysconfig.get_python_lib(True)
    _, _, relative_path = library_path.rpartition(sysconfig.PREFIX)
    if relative_path.startswith(os.sep):
        relative_path = relative_path[1:]
    return relative_path
Retrieves the Python library directory path .
61
9
239,186
def run(self):
    """Runs the build extension, setting preprocessor defines per compiler."""
    compiler = new_compiler(compiler=self.compiler)
    if compiler.compiler_type == "msvc":
        self.define = [
            ("UNICODE", ""),
        ]
    else:
        # Run configure first so config.h exists, echoing its summary section
        output = self._RunCommand("sh configure --disable-shared-libs")

        printing = False
        for output_line in output.split("\n"):
            output_line = output_line.rstrip()
            if output_line == "configure:":
                printing = True
            if printing:
                print(output_line)

        self.define = [
            ("HAVE_CONFIG_H", ""),
            ("LOCALEDIR", "\"/usr/share/locale\""),
        ]

    build_ext.run(self)
Runs the build extension .
168
6
239,187
def _ReadConfigureAc ( self ) : file_object = open ( "configure.ac" , "rb" ) if not file_object : raise IOError ( "Unable to open: configure.ac" ) found_ac_init = False found_library_name = False for line in file_object . readlines ( ) : line = line . strip ( ) if found_library_name : library_version = line [ 1 : - 2 ] if sys . version_info [ 0 ] >= 3 : library_version = library_version . decode ( "ascii" ) self . library_version = library_version break elif found_ac_init : library_name = line [ 1 : - 2 ] if sys . version_info [ 0 ] >= 3 : library_name = library_name . decode ( "ascii" ) self . library_name = library_name found_library_name = True elif line . startswith ( b"AC_INIT" ) : found_ac_init = True file_object . close ( ) if not self . library_name or not self . library_version : raise RuntimeError ( "Unable to find library name and version in: configure.ac" )
Reads configure . ac to initialize the project information .
266
11
239,188
def _ReadMakefileAm ( self ) : if not self . library_name : raise RuntimeError ( "Missing library name" ) file_object = open ( "Makefile.am" , "rb" ) if not file_object : raise IOError ( "Unable to open: Makefile.am" ) found_subdirs = False for line in file_object . readlines ( ) : line = line . strip ( ) if found_subdirs : library_name , _ , _ = line . partition ( b" " ) if sys . version_info [ 0 ] >= 3 : library_name = library_name . decode ( "ascii" ) self . include_directories . append ( library_name ) if library_name . startswith ( "lib" ) : self . library_names . append ( library_name ) if library_name == self . library_name : break elif line . startswith ( b"SUBDIRS" ) : found_subdirs = True file_object . close ( ) if not self . include_directories or not self . library_names : raise RuntimeError ( "Unable to find include directories and library names in: " "Makefile.am" )
Reads Makefile . am to initialize the project information .
266
12
239,189
def babel_compile(source, **kwargs):
    """Compile ES6 source to ES5 using Babel.js.

    Keyword arguments are passed through as Babel options; the es2015 preset
    is applied when none (or an empty preset list) is provided.
    """
    if not kwargs.get('presets'):
        kwargs['presets'] = ["es2015"]

    with open(BABEL_COMPILER, 'rb') as babel_js:
        compiler_source = babel_js.read().decode('utf-8')
    return evaljs(
        (compiler_source,
         'var bres, res;'
         'bres = Babel.transform(dukpy.es6code, dukpy.babel_options);',
         'res = {map: bres.map, code: bres.code};'),
        es6code=source,
        babel_options=kwargs)
Compiles the given source from ES6 to ES5 using Babeljs
165
14
239,190
def coffee_compile(source):
    """Compile CoffeeScript source to JavaScript."""
    with open(COFFEE_COMPILER, 'rb') as coffeescript_js:
        compiler_source = coffeescript_js.read().decode('utf-8')
    return evaljs(
        (compiler_source, 'CoffeeScript.compile(dukpy.coffeecode)'),
        coffeecode=source)
Compiles the given source from CoffeeScript to JavaScript
83
10
239,191
def register_path(self, path):
    """Register a directory to search for modules; most recently registered
    directories are searched first."""
    normalized = os.path.abspath(path)
    self._paths.insert(0, normalized)
Registers a directory where to look for modules .
30
10
239,192
def lookup(self, module_name):
    """Search registered paths for a file providing *module_name*.

    Returns (normalized_module_name, file_path), or (None, None) when no
    registered path provides the module.
    """
    for directory in self._paths:
        candidate = os.path.join(directory, module_name)
        resolved_name, resolved_file = self._lookup(candidate, module_name)
        if resolved_file:
            return resolved_name, resolved_file
    return None, None
Searches for a file providing given module .
82
10
239,193
def load(self, module_name):
    """Return (normalized_module_id, source_text) for the given module, or
    (None, None) when it cannot be found. Source is decoded as UTF-8."""
    resolved_name, path = self.lookup(module_name)
    if not path:
        return None, None
    with open(path, 'rb') as source_file:
        return resolved_name, source_file.read().decode('utf-8')
Returns source code and normalized module id of the given module .
62
12
239,194
def less_compile(source, options=None):
    """Compile LESS source to CSS.

    Raises LessCompilerError on compiler errors and RuntimeError when the
    interpreter returns nothing at all.
    """
    options = options or {}
    res = NodeLikeInterpreter().evaljs(
        ('var result = null;'
         'var less = require("less/less-node");',
         'less.render(dukpy.lesscode, dukpy.lessoptions, function(error, output) {'
         ' result = {"error": error, "output": output};'
         '});'
         'result;'),
        lesscode=source,
        lessoptions=options)

    if not res:
        raise RuntimeError('Results or errors unavailable')

    compile_error = res.get('error')
    if compile_error:
        raise LessCompilerError(compile_error['message'])
    return res['output']['css']
Compiles the given source from LESS to CSS
169
10
239,195
def install_jspackage(package_name, version, modulesdir):
    """Installs a JavaScript package downloaded from npmjs.org.

    Resolves the package's dependency tree, downloads each tarball, and
    extracts it into modulesdir/<dependency>, replacing any existing copy.

    Raises:
        JSPackageInstallError: when a dependency has no usable download URL.
    """
    if not version:
        version = ''

    requirements = _resolve_dependencies(package_name, version)
    print('Packages going to be installed: {0}'.format(
        ', '.join('{0}->{1}'.format(*i) for i in requirements)))

    # Collect tarball URLs up front so a missing URL aborts before any download
    downloads = {}
    for dependency_name, _, version_info in requirements:
        try:
            downloads[dependency_name] = version_info['dist']['tarball']
        except KeyError:
            raise JSPackageInstallError('Unable to detect a supported download url for package',
                                        error_code=3)

    for dependency_name, download_url in downloads.items():
        # Buffer the whole tarball in memory; printed dots act as progress
        tarball = BytesIO()
        print('Fetching {0}'.format(download_url), end='')
        with closing(urlopen(download_url)) as data:
            chunk = data.read(1024)
            while chunk:
                print('.', end='')
                tarball.write(chunk)
                chunk = data.read(1024)
        print('')
        tarball.seek(0)
        with closing(tarfile.open(fileobj=tarball)) as tb:
            dest = os.path.join(modulesdir, dependency_name)
            # Extract to a temp dir first so a failed extraction never
            # clobbers an existing install; npm tarballs root at 'package'
            tmpdir = tempfile.mkdtemp()
            try:
                tb.extractall(tmpdir)
                shutil.rmtree(os.path.abspath(dest), ignore_errors=True)
                shutil.move(os.path.join(tmpdir, 'package'), os.path.abspath(dest))
            finally:
                shutil.rmtree(tmpdir)

    print('Installing {0} in {1} Done!'.format(package_name, modulesdir))
Installs a JavaScript package downloaded from npmjs . org .
405
12
239,196
def evaljs(self, code, **kwargs):
    """Run JavaScript code in this interpreter's context.

    Keyword arguments are JSON-serialized and exposed to the script; the
    script's JSON-serialized result is decoded back to Python (or None).
    """
    encoded_vars = json.dumps(kwargs)
    adapted_code = self._adapt_code(code)

    if not isinstance(adapted_code, bytes):
        adapted_code = adapted_code.encode('utf-8')
    if not isinstance(encoded_vars, bytes):
        encoded_vars = encoded_vars.encode('utf-8')

    result = _dukpy.eval_string(self, adapted_code, encoded_vars)
    return None if result is None else json.loads(result.decode('utf-8'))
Runs JavaScript code in the context of the interpreter .
140
11
239,197
def typescript_compile(source):
    """Compile TypeScript source to ES5 using TypescriptServices.js."""
    with open(TS_COMPILER, 'r') as tsservices_js:
        services_source = tsservices_js.read()
    transpile_call = 'ts.transpile(dukpy.tscode, {options});'.format(options=TSC_OPTIONS)
    return evaljs((services_source, transpile_call), tscode=source)
Compiles the given source from TypeScript to ES5 using TypescriptServices . js
87
18
239,198
def get_private_file(self):
    """Bundle the request, storage, and relative path into a single
    PrivateFile object so subclasses and servers can pick what they need."""
    storage = self.get_storage()
    relative_name = self.get_path()
    return PrivateFile(request=self.request, storage=storage, relative_name=relative_name)
Return all relevant data in a single object so this is easy to extend and server implementations can pick what they need .
42
23
239,199
def get(self, request, *args, **kwargs):
    """Handle GET: serve the private file, a 403 when access is denied, or
    the not-found response when the file does not exist."""
    private_file = self.get_private_file()
    if not self.can_access_file(private_file):
        return HttpResponseForbidden('Private storage access denied')
    if private_file.exists():
        return self.serve_file(private_file)
    return self.serve_file_not_found(private_file)