idx (int64, 0–63k) | question (string, lengths 53–5.28k) | target (string, lengths 5–805) |
|---|---|---|
45,100 | def remove_data ( self , request , pk = None ) : collection = self . get_object ( ) if 'ids' not in request . data : return Response ( { "error" : "`ids` parameter is required" } , status = status . HTTP_400_BAD_REQUEST ) for data_id in request . data [ 'ids' ] : collection . data . remove ( data_id ) return Response ( ) | Remove data from collection . |
45,101 | def register_lookup ( self , lookup ) : if lookup . operator in self . _lookups : raise KeyError ( "Lookup for operator '{}' is already registered" . format ( lookup . operator ) ) self . _lookups [ lookup . operator ] = lookup ( ) | Register lookup . |
45,102 | def get_lookup ( self , operator ) : try : return self . _lookups [ operator ] except KeyError : raise NotImplementedError ( "Lookup operator '{}' is not supported" . format ( operator ) ) | Look up a lookup . |
45,103 | def build ( self , search , raw_query ) : unmatched_items = { } for expression , value in raw_query . items ( ) : tokens = expression . split ( TOKEN_SEPARATOR ) field = tokens [ 0 ] tail = tokens [ 1 : ] if field not in self . fields : unmatched_items [ expression ] = value continue field = self . fields_map . get ( field , field ) if tail : if len ( tail ) > 1 : raise NotImplementedError ( "Nested lookup expressions are not supported" ) lookup = self . get_lookup ( tail [ 0 ] ) search = lookup . apply ( search , field , value ) else : custom_filter = getattr ( self . custom_filter_object , 'custom_filter_{}' . format ( field ) , None ) if custom_filter is not None : search = custom_filter ( value , search ) elif isinstance ( value , list ) : filters = [ Q ( 'match' , ** { field : item } ) for item in value ] search = search . query ( 'bool' , should = filters ) else : search = search . query ( 'match' , ** { field : { 'query' : value , 'operator' : 'and' } } ) return ( search , unmatched_items ) | Build query . |
45,104 | def resolve_sid ( f ) : @ functools . wraps ( f ) def wrapper ( request , did , * args , ** kwargs ) : pid = resolve_sid_func ( request , did ) return f ( request , pid , * args , ** kwargs ) return wrapper | View handler decorator that adds SID resolve and PID validation . |
45,105 | def trusted_permission ( f ) : @ functools . wraps ( f ) def wrapper ( request , * args , ** kwargs ) : trusted ( request ) return f ( request , * args , ** kwargs ) return wrapper | Access only by D1 infrastructure . |
45,106 | def authenticated ( f ) : @ functools . wraps ( f ) def wrapper ( request , * args , ** kwargs ) : if d1_common . const . SUBJECT_AUTHENTICATED not in request . all_subjects_set : raise d1_common . types . exceptions . NotAuthorized ( 0 , 'Access allowed only for authenticated subjects. Please reconnect with ' 'a valid DataONE session certificate. active_subjects="{}"' . format ( d1_gmn . app . auth . format_active_subjects ( request ) ) , ) return f ( request , * args , ** kwargs ) return wrapper | Access only with a valid session . |
45,107 | def required_permission ( f , level ) : @ functools . wraps ( f ) def wrapper ( request , pid , * args , ** kwargs ) : d1_gmn . app . auth . assert_allowed ( request , level , pid ) return f ( request , pid , * args , ** kwargs ) return wrapper | Assert that subject has access at given level or higher for object . |
45,108 | def _read_and_deserialize_dataone_type ( self , response ) : try : return d1_common . xml . deserialize ( response . content ) except ValueError as e : self . _raise_service_failure_invalid_dataone_type ( response , e ) | Given a response body , try to create an instance of a DataONE type . |
45,109 | def isAuthorized ( self , pid , action , vendorSpecific = None ) : response = self . isAuthorizedResponse ( pid , action , vendorSpecific ) return self . _read_boolean_401_response ( response ) | Return True if user is allowed to perform action on pid else False . |
45,110 | def fields ( self ) : fields = super ( ) . fields return apply_subfield_projection ( self , copy . copy ( fields ) ) | Filter fields based on request query parameters . |
45,111 | def iterate_fields ( fields , schema , path_prefix = None ) : if path_prefix is not None and path_prefix != '' and path_prefix [ - 1 ] != '.' : path_prefix += '.' schema_dict = { val [ 'name' ] : val for val in schema } for field_id , properties in fields . items ( ) : path = '{}{}' . format ( path_prefix , field_id ) if path_prefix is not None else None if field_id not in schema_dict : raise KeyError ( "Field definition ({}) missing in schema" . format ( field_id ) ) if 'group' in schema_dict [ field_id ] : for rvals in iterate_fields ( properties , schema_dict [ field_id ] [ 'group' ] , path ) : yield rvals if path_prefix is not None else rvals [ : 2 ] else : rvals = ( schema_dict [ field_id ] , fields , path ) yield rvals if path_prefix is not None else rvals [ : 2 ] | Iterate over all field values' sub - fields . |
45,112 | def iterate_schema ( fields , schema , path_prefix = '' ) : if path_prefix and path_prefix [ - 1 ] != '.' : path_prefix += '.' for field_schema in schema : name = field_schema [ 'name' ] if 'group' in field_schema : for rvals in iterate_schema ( fields [ name ] if name in fields else { } , field_schema [ 'group' ] , '{}{}' . format ( path_prefix , name ) ) : yield rvals else : yield ( field_schema , fields , '{}{}' . format ( path_prefix , name ) ) | Iterate over all schema sub - fields . |
45,113 | def iterate_dict ( container , exclude = None , path = None ) : if path is None : path = [ ] for key , value in container . items ( ) : if callable ( exclude ) and exclude ( key , value ) : continue if isinstance ( value , collections . Mapping ) : for inner_path , inner_key , inner_value in iterate_dict ( value , exclude = exclude , path = path + [ key ] ) : yield inner_path , inner_key , inner_value yield path , key , value | Iterate over a nested dictionary . |
45,114 | def get_subjects ( request ) : if _is_certificate_provided ( request ) : try : return get_authenticated_subjects ( request . META [ 'SSL_CLIENT_CERT' ] ) except Exception as e : raise d1_common . types . exceptions . InvalidToken ( 0 , 'Error extracting session from certificate. error="{}"' . format ( str ( e ) ) , ) else : return d1_common . const . SUBJECT_PUBLIC , set ( ) | Get all subjects in the certificate . |
45,115 | def get_authenticated_subjects ( cert_pem ) : if isinstance ( cert_pem , str ) : cert_pem = cert_pem . encode ( 'utf-8' ) return d1_common . cert . subjects . extract_subjects ( cert_pem ) | Return primary subject and set of equivalents authenticated by certificate . |
45,116 | def get_serializer_class ( self ) : base_class = super ( ) . get_serializer_class ( ) class SerializerWithPermissions ( base_class ) : def get_fields ( serializer_self ) : fields = super ( ) . get_fields ( ) fields [ 'current_user_permissions' ] = CurrentUserPermissionsSerializer ( read_only = True ) return fields def to_representation ( serializer_self , instance ) : data = super ( ) . to_representation ( instance ) if ( 'fields' not in self . request . query_params or 'current_user_permissions' in self . request . query_params [ 'fields' ] ) : data [ 'current_user_permissions' ] = get_object_perms ( instance , self . request . user ) return data return SerializerWithPermissions | Augment base serializer class . |
45,117 | def detail_permissions ( self , request , pk = None ) : obj = self . get_object ( ) if request . method == 'POST' : content_type = ContentType . objects . get_for_model ( obj ) payload = request . data share_content = strtobool ( payload . pop ( 'share_content' , 'false' ) ) user = request . user is_owner = user . has_perm ( 'owner_{}' . format ( content_type ) , obj = obj ) allow_owner = is_owner or user . is_superuser check_owner_permission ( payload , allow_owner ) check_public_permissions ( payload ) check_user_permissions ( payload , request . user . pk ) with transaction . atomic ( ) : update_permission ( obj , payload ) owner_count = UserObjectPermission . objects . filter ( object_pk = obj . id , content_type = content_type , permission__codename__startswith = 'owner_' ) . count ( ) if not owner_count : raise exceptions . ParseError ( 'Object must have at least one owner.' ) if share_content : self . set_content_permissions ( user , obj , payload ) return Response ( get_object_perms ( obj ) ) | Get or set permissions API endpoint . |
45,118 | def save ( self , * args , ** kwargs ) : if self . descriptor_schema : try : validate_schema ( self . descriptor , self . descriptor_schema . schema ) self . descriptor_dirty = False except DirtyError : self . descriptor_dirty = True elif self . descriptor and self . descriptor != { } : raise ValueError ( "`descriptor_schema` must be defined if `descriptor` is given" ) super ( ) . save ( ) | Perform descriptor validation and save object . |
45,119 | def _scalar2array ( d ) : da = { } for k , v in d . items ( ) : if '_' not in k : da [ k ] = v else : name = '' . join ( k . split ( '_' ) [ : - 1 ] ) ind = k . split ( '_' ) [ - 1 ] dim = len ( ind ) if name not in da : shape = tuple ( 3 for i in range ( dim ) ) da [ name ] = np . empty ( shape , dtype = complex ) da [ name ] [ : ] = np . nan da [ name ] [ tuple ( int ( i ) - 1 for i in ind ) ] = v return da | Convert a dictionary with scalar elements and string indices _1234 to a dictionary of arrays . Unspecified entries are np . nan . |
45,120 | def _symm_current ( C ) : nans = np . isnan ( C ) C [ nans ] = np . einsum ( 'klij' , C ) [ nans ] return C | To get rid of NaNs produced by _scalar2array , symmetrize operators where C_ijkl = C_klij |
45,121 | def _antisymm_12 ( C ) : nans = np . isnan ( C ) C [ nans ] = - np . einsum ( 'jikl' , C ) [ nans ] return C | To get rid of NaNs produced by _scalar2array , antisymmetrize the first two indices of operators where C_ijkl = - C_jikl |
45,122 | def JMS_to_array ( C , sectors = None ) : if sectors is None : wc_keys = wcxf . Basis [ 'WET' , 'JMS' ] . all_wcs else : try : wc_keys = [ k for s in sectors for k in wcxf . Basis [ 'WET' , 'JMS' ] . sectors [ s ] ] except KeyError : print ( sectors ) C_complete = { k : C . get ( k , 0 ) for k in wc_keys } Ca = _scalar2array ( C_complete ) for k in Ca : if k in C_symm_keys [ 5 ] : Ca [ k ] = _symm_herm ( Ca [ k ] ) if k in C_symm_keys [ 41 ] : Ca [ k ] = _symm_current ( Ca [ k ] ) if k in C_symm_keys [ 4 ] : Ca [ k ] = _symm_herm ( _symm_current ( Ca [ k ] ) ) if k in C_symm_keys [ 9 ] : Ca [ k ] = _antisymm_12 ( Ca [ k ] ) return Ca | For a dictionary with JMS Wilson coefficients return a dictionary of arrays . |
45,123 | def symmetrize_JMS_dict ( C ) : wc_keys = set ( wcxf . Basis [ 'WET' , 'JMS' ] . all_wcs ) Cs = { } for op , v in C . items ( ) : if '_' not in op or op in wc_keys : Cs [ op ] = v continue name , ind = op . split ( '_' ) if name in C_symm_keys [ 5 ] : i , j , k , l = ind indnew = '' . join ( [ j , i , l , k ] ) Cs [ '_' . join ( [ name , indnew ] ) ] = v . conjugate ( ) elif name in C_symm_keys [ 41 ] : i , j , k , l = ind indnew = '' . join ( [ k , l , i , j ] ) Cs [ '_' . join ( [ name , indnew ] ) ] = v elif name in C_symm_keys [ 4 ] : i , j , k , l = ind indnew = '' . join ( [ l , k , j , i ] ) newname = '_' . join ( [ name , indnew ] ) if newname in wc_keys : Cs [ newname ] = v . conjugate ( ) else : indnew = '' . join ( [ j , i , l , k ] ) newname = '_' . join ( [ name , indnew ] ) if newname in wc_keys : Cs [ newname ] = v . conjugate ( ) else : indnew = '' . join ( [ k , l , i , j ] ) newname = '_' . join ( [ name , indnew ] ) Cs [ newname ] = v elif name in C_symm_keys [ 9 ] : i , j , k , l = ind indnew = '' . join ( [ j , i , k , l ] ) Cs [ '_' . join ( [ name , indnew ] ) ] = - v return Cs | For a dictionary with JMS Wilson coefficients but keys that might not be in the non - redundant basis return a dictionary with keys from the basis and values conjugated if necessary . |
45,124 | def rotate_down ( C_in , p ) : C = C_in . copy ( ) V = ckmutil . ckm . ckm_tree ( p [ "Vus" ] , p [ "Vub" ] , p [ "Vcb" ] , p [ "delta" ] ) UdL = V for k in [ 'VddLL' ] : C [ k ] = np . einsum ( 'ia,jb,kc,ld,ijkl->abcd' , UdL . conj ( ) , UdL , UdL . conj ( ) , UdL , C_in [ k ] ) for k in [ 'V1udLL' , 'V8udLL' , 'VedLL' , 'VnudLL' ] : C [ k ] = np . einsum ( 'kc,ld,ijkl->ijcd' , UdL . conj ( ) , UdL , C_in [ k ] ) for k in [ 'V1ddLR' , 'V1duLR' , 'V8ddLR' , 'V8duLR' , 'VdeLR' ] : C [ k ] = np . einsum ( 'ia,jb,ijkl->abkl' , UdL . conj ( ) , UdL , C_in [ k ] ) for k in [ 'S1ddRR' , 'S8ddRR' ] : C [ k ] = np . einsum ( 'ia,kc,ijkl->ajcl' , UdL . conj ( ) , UdL . conj ( ) , C_in [ k ] ) for k in [ 'V1udduLR' , 'V8udduLR' ] : C [ k ] = np . einsum ( 'jb,ijkl->ibkl' , UdL , C_in [ k ] ) for k in [ 'VnueduLL' , 'SedRR' , 'TedRR' , 'SnueduRR' , 'TnueduRR' , 'S1udRR' , 'S8udRR' , 'S1udduRR' , 'S8udduRR' , ] : C [ k ] = np . einsum ( 'kc,ijkl->ijcl' , UdL . conj ( ) , C_in [ k ] ) for k in [ 'SedRL' , ] : C [ k ] = np . einsum ( 'ld,ijkl->ijkd' , UdL , C_in [ k ] ) for k in [ 'SduuLL' , 'SduuLR' ] : C [ k ] = np . einsum ( 'ia,ijkl->ajkl' , UdL , C_in [ k ] ) for k in [ 'SuudRL' , 'SdudRL' ] : C [ k ] = np . einsum ( 'kc,ijkl->ijcl' , UdL , C_in [ k ] ) for k in [ 'SuddLL' ] : C [ k ] = np . einsum ( 'jb,kc,ijkl->ibcl' , UdL , UdL , C_in [ k ] ) return C | Redefinition of all Wilson coefficients in the JMS basis when rotating down - type quark fields from the flavour to the mass basis . |
45,125 | def unscale_dict_wet ( C ) : return { k : _scale_dict [ k ] * v for k , v in C . items ( ) } | Undo the scaling applied in scale_dict_wet . |
45,126 | def submit ( self , data , runtime_dir , argv ) : logger . debug ( __ ( "Connector '{}' running for Data with id {} ({})." , self . __class__ . __module__ , data . id , repr ( argv ) ) ) subprocess . Popen ( argv , cwd = runtime_dir , stdin = subprocess . DEVNULL ) . wait ( ) | Run process locally . |
45,127 | def are_checksums_equal ( checksum_a_pyxb , checksum_b_pyxb ) : if checksum_a_pyxb . algorithm != checksum_b_pyxb . algorithm : raise ValueError ( 'Cannot compare checksums calculated with different algorithms. ' 'a="{}" b="{}"' . format ( checksum_a_pyxb . algorithm , checksum_b_pyxb . algorithm ) ) return checksum_a_pyxb . value ( ) . lower ( ) == checksum_b_pyxb . value ( ) . lower ( ) | Determine if checksums are equal . |
45,128 | def format_checksum ( checksum_pyxb ) : return '{}/{}' . format ( checksum_pyxb . algorithm . upper ( ) . replace ( '-' , '' ) , checksum_pyxb . value ( ) . lower ( ) ) | Create string representation of a PyXB Checksum object . |
45,129 | def handle ( self , * args , ** kwargs ) : listener = ExecutorListener ( redis_params = getattr ( settings , 'FLOW_MANAGER' , { } ) . get ( 'REDIS_CONNECTION' , { } ) ) def _killer ( signum , frame ) : listener . terminate ( ) signal ( SIGINT , _killer ) signal ( SIGTERM , _killer ) async def _runner ( ) : if kwargs [ 'clear_queue' ] : await listener . clear_queue ( ) async with listener : pass loop = asyncio . new_event_loop ( ) loop . run_until_complete ( _runner ( ) ) loop . close ( ) | Run the executor listener . This method never returns . |
45,130 | def apply_subfield_projection ( field , value , deep = False ) : prefix = [ ] root = field while root . parent is not None : if root . field_name : prefix . append ( root . field_name ) root = root . parent prefix = prefix [ : : - 1 ] context = getattr ( root , '_context' , { } ) request = context . get ( 'request' ) if request is None : return value filtered = set ( request . query_params . get ( 'fields' , '' ) . split ( FIELD_SEPARATOR ) ) filtered . discard ( '' ) if not filtered : return value current_level = len ( prefix ) current_projection = [ ] for item in filtered : item = item . split ( FIELD_DEREFERENCE ) if len ( item ) <= current_level : continue if item [ : current_level ] == prefix : if deep : current_projection . append ( item [ current_level : ] ) else : current_projection . append ( [ item [ current_level ] ] ) if deep and not current_projection : return value return apply_projection ( current_projection , value ) | Apply projection from request context . |
45,131 | def apply_projection ( projection , value ) : if isinstance ( value , Sequence ) : return [ apply_projection ( projection , item ) for item in value ] elif not isinstance ( value , Mapping ) : return value try : current_projection = [ p [ 0 ] for p in projection ] except IndexError : return value for name in list ( value . keys ( ) ) : if name not in current_projection : value . pop ( name ) elif isinstance ( value [ name ] , dict ) : value [ name ] = apply_projection ( [ p [ 1 : ] for p in projection if p [ 0 ] == name ] , value [ name ] ) return value | Apply projection . |
45,132 | def _create_partitions ( self , instance , partitions ) : for partition in partitions : RelationPartition . objects . create ( relation = instance , entity = partition [ 'entity' ] , label = partition . get ( 'label' , None ) , position = partition . get ( 'position' , None ) , ) | Create partitions . |
45,133 | def create ( self , validated_data ) : partitions = validated_data . pop ( 'relationpartition_set' ) with transaction . atomic ( ) : instance = Relation . objects . create ( ** validated_data ) self . _create_partitions ( instance , partitions ) return instance | Create Relation object and add partitions of Entities . |
45,134 | def update ( self , instance , validated_data ) : partitions = validated_data . pop ( 'relationpartition_set' , None ) with transaction . atomic ( ) : instance = super ( ) . update ( instance , validated_data ) if partitions is not None : instance . relationpartition_set . all ( ) . delete ( ) self . _create_partitions ( instance , partitions ) return instance | Update Relation . |
45,135 | def _print_level ( level , msg ) : for l in str ( msg . rstrip ( ) ) . split ( "\n" ) : print ( "{0:<9s}{1}" . format ( level , str ( l ) ) ) | Print the information in a Unicode - safe manner . |
45,136 | def get_process_definition_start ( fname , slug ) : with open ( fname ) as file_ : for i , line in enumerate ( file_ ) : if re . search ( r'slug:\s*{}' . format ( slug ) , line ) : return i + 1 return 1 | Find the first line of process definition . |
45,137 | def get_processes ( process_dir , base_source_uri ) : global PROCESS_CACHE if PROCESS_CACHE is not None : return PROCESS_CACHE all_process_files = [ ] process_file_extensions = [ '*.yaml' , '*.yml' ] for root , _ , filenames in os . walk ( process_dir ) : for extension in process_file_extensions : for filename in fnmatch . filter ( filenames , extension ) : all_process_files . append ( os . path . join ( root , filename ) ) def read_yaml_file ( fname ) : with open ( fname ) as f : return yaml . load ( f , Loader = yaml . FullLoader ) processes = [ ] for process_file in all_process_files : processes_in_file = read_yaml_file ( process_file ) for process in processes_in_file : startline = get_process_definition_start ( process_file , process [ 'slug' ] ) process [ 'source_uri' ] = base_source_uri + process_file [ len ( process_dir ) + 1 : ] + '#L' + str ( startline ) if 'category' not in process : process [ 'category' ] = 'uncategorized' processes . append ( process ) PROCESS_CACHE = processes return processes | Find processes in path . |
45,138 | def setup ( app ) : app . add_config_value ( 'autoprocess_process_dir' , '' , 'env' ) app . add_config_value ( 'autoprocess_source_base_url' , '' , 'env' ) app . add_config_value ( 'autoprocess_definitions_uri' , '' , 'env' ) app . add_directive ( 'autoprocess' , AutoProcessDirective ) app . add_directive ( 'autoprocesscategory' , AutoProcessCategoryDirective ) app . add_directive ( 'autoprocesstype' , AutoProcessTypesDirective ) return { 'version' : '0.2' } | Register directives . |
45,139 | def make_field ( self , field_name , field_body ) : name = nodes . field_name ( ) name += nodes . Text ( field_name ) paragraph = nodes . paragraph ( ) if isinstance ( field_body , str ) : paragraph += nodes . Text ( field_body ) else : paragraph += field_body body = nodes . field_body ( ) body += paragraph field = nodes . field ( ) field . extend ( [ name , body ] ) return field | Fill content into nodes . |
45,140 | def make_properties_list ( self , field ) : properties_list = nodes . field_list ( ) property_names = [ 'label' , 'type' , 'description' , 'required' , 'disabled' , 'hidden' , 'default' , 'placeholder' , 'validate_regex' , 'choices' , 'collapse' , 'group' ] for name in property_names : if name not in field : continue value = field [ name ] if name in [ 'type' , 'default' , 'placeholder' , 'validate_regex' ] : literal_node = nodes . literal ( str ( value ) , str ( value ) ) properties_list += self . make_field ( name , literal_node ) elif name == 'choices' : bullet_list = nodes . bullet_list ( ) for choice in value : label = nodes . Text ( choice [ 'label' ] + ': ' ) val = nodes . literal ( choice [ 'value' ] , choice [ 'value' ] ) paragraph = nodes . paragraph ( ) paragraph += label paragraph += val list_item = nodes . list_item ( ) list_item += paragraph bullet_list += list_item properties_list += self . make_field ( name , bullet_list ) else : properties_list += self . make_field ( name , str ( value ) ) return properties_list | Fill the field into a properties list and return it . |
45,141 | def make_process_header ( self , slug , typ , version , source_uri , description , inputs ) : node = addnodes . desc ( ) signode = addnodes . desc_signature ( slug , '' ) node . append ( signode ) node [ 'objtype' ] = node [ 'desctype' ] = typ signode += addnodes . desc_annotation ( typ , typ , classes = [ 'process-type' ] ) signode += addnodes . desc_addname ( '' , '' ) signode += addnodes . desc_name ( slug + ' ' , slug + ' ' ) paramlist = addnodes . desc_parameterlist ( ) for field_schema , _ , _ in iterate_schema ( { } , inputs , '' ) : field_type = field_schema [ 'type' ] field_name = field_schema [ 'name' ] field_default = field_schema . get ( 'default' , None ) field_default = '' if field_default is None else '={}' . format ( field_default ) param = addnodes . desc_parameter ( '' , '' , noemph = True ) param += nodes . emphasis ( field_type , field_type , classes = [ 'process-type' ] ) param += nodes . strong ( text = '\xa0\xa0' + field_name ) paramlist += param signode += paramlist signode += nodes . reference ( '' , nodes . Text ( '[Source: v{}]' . format ( version ) ) , refuri = source_uri , classes = [ 'viewcode-link' ] ) desc = nodes . paragraph ( ) desc += nodes . Text ( description , description ) return [ node , desc ] | Generate a process definition header . |
45,142 | def make_process_node ( self , process ) : name = process [ 'name' ] slug = process [ 'slug' ] typ = process [ 'type' ] version = process [ 'version' ] description = process . get ( 'description' , '' ) source_uri = process [ 'source_uri' ] inputs = process . get ( 'input' , [ ] ) outputs = process . get ( 'output' , [ ] ) section = nodes . section ( ids = [ 'process-' + slug ] ) section += nodes . title ( name , name ) section += self . make_process_header ( slug , typ , version , source_uri , description , inputs ) container_node = nodes . container ( classes = [ 'toggle' ] ) container_header = nodes . paragraph ( classes = [ 'header' ] ) container_header += nodes . strong ( text = 'Input arguments' ) container_node += container_header container_body = nodes . container ( ) for field_schema , _ , path in iterate_schema ( { } , inputs , '' ) : container_body += nodes . strong ( text = path ) container_body += self . make_properties_list ( field_schema ) container_node += container_body section += container_node container_node = nodes . container ( classes = [ 'toggle' ] ) container_header = nodes . paragraph ( classes = [ 'header' ] ) container_header += nodes . strong ( text = 'Output results' ) container_node += container_header container_body = nodes . container ( ) for field_schema , _ , path in iterate_schema ( { } , outputs , '' ) : container_body += nodes . strong ( text = path ) container_body += self . make_properties_list ( field_schema ) container_node += container_body section += container_node return [ section , addnodes . index ( entries = [ ( 'single' , name , 'process-' + slug , '' , None ) ] ) ] | Fill the content of process definition node . |
45,143 | def run ( self ) : config = self . state . document . settings . env . config processes = get_processes ( config . autoprocess_process_dir , config . autoprocess_source_base_url ) process_nodes = [ ] for process in sorted ( processes , key = itemgetter ( 'name' ) ) : process_nodes . extend ( self . make_process_node ( process ) ) return process_nodes | Create a list of process definitions . |
45,144 | def run ( self ) : config = self . state . document . settings . env . config processes = get_processes ( config . autoprocess_process_dir , config . autoprocess_source_base_url ) processes . sort ( key = itemgetter ( 'category' ) ) categorized_processes = { k : list ( g ) for k , g in groupby ( processes , itemgetter ( 'category' ) ) } category_sections = { '' : nodes . container ( ids = [ 'categories' ] ) } top_categories = [ ] for category in sorted ( categorized_processes . keys ( ) ) : category_path = '' for category_node in category . split ( ':' ) : parent_category_path = category_path category_path += '{}:' . format ( category_node ) if category_path in category_sections : continue category_name = category_node . capitalize ( ) section = nodes . section ( ids = [ 'category-' + category_node ] ) section += nodes . title ( category_name , category_name ) category_key = category_path [ : - 1 ] if category_key in categorized_processes : listnode = nodes . bullet_list ( ) section += listnode for process in categorized_processes [ category_key ] : par = nodes . paragraph ( ) node = nodes . reference ( '' , process [ 'name' ] , internal = True ) node [ 'refuri' ] = config . autoprocess_definitions_uri + '#process-' + process [ 'slug' ] node [ 'reftitle' ] = process [ 'name' ] par += node listnode += nodes . list_item ( '' , par ) category_sections [ parent_category_path ] += section category_sections [ category_path ] = section if parent_category_path == '' : top_categories . append ( section ) return top_categories | Create a category tree . |
45,145 | def run ( self ) : config = self . state . document . settings . env . config processes = get_processes ( config . autoprocess_process_dir , config . autoprocess_source_base_url ) processes . sort ( key = itemgetter ( 'type' ) ) processes_by_types = { k : list ( g ) for k , g in groupby ( processes , itemgetter ( 'type' ) ) } listnode = nodes . bullet_list ( ) for typ in sorted ( processes_by_types . keys ( ) ) : par = nodes . paragraph ( ) par += nodes . literal ( typ , typ ) par += nodes . Text ( ' - ' ) processes = sorted ( processes_by_types [ typ ] , key = itemgetter ( 'name' ) ) last_process = processes [ - 1 ] for process in processes : node = nodes . reference ( '' , process [ 'name' ] , internal = True ) node [ 'refuri' ] = config . autoprocess_definitions_uri + '#process-' + process [ 'slug' ] node [ 'reftitle' ] = process [ 'name' ] par += node if process != last_process : par += nodes . Text ( ', ' ) listnode += nodes . list_item ( '' , par ) return [ listnode ] | Create a type list . |
45,146 | async def synchronize ( self , pid , vendor_specific = None ) : return await self . _request_pyxb ( "post" , [ "synchronize" , pid ] , { } , mmp_dict = { "pid" : pid } , vendor_specific = vendor_specific , ) | Send an object synchronization request to the CN . |
45,147 | def _datetime_to_iso8601 ( self , query_dict ) : return { k : v if not isinstance ( v , datetime . datetime ) else v . isoformat ( ) for k , v in list ( query_dict . items ( ) ) } | Encode any datetime query parameters to ISO8601 . |
45,148 | def to_representation ( self , value ) : value = apply_subfield_projection ( self , value , deep = True ) return super ( ) . to_representation ( value ) | Project outgoing native value . |
45,149 | def validate_bagit_file ( bagit_path ) : _assert_zip_file ( bagit_path ) bagit_zip = zipfile . ZipFile ( bagit_path ) manifest_info_list = _get_manifest_info_list ( bagit_zip ) _validate_checksums ( bagit_zip , manifest_info_list ) return True | Check if a BagIt file is valid . |
45,150 | def create_bagit_stream ( dir_name , payload_info_list ) : zip_file = zipstream . ZipFile ( mode = 'w' , compression = zipstream . ZIP_DEFLATED ) _add_path ( dir_name , payload_info_list ) payload_byte_count , payload_file_count = _add_payload_files ( zip_file , payload_info_list ) tag_info_list = _add_tag_files ( zip_file , dir_name , payload_info_list , payload_byte_count , payload_file_count ) _add_manifest_files ( zip_file , dir_name , payload_info_list , tag_info_list ) _add_tag_manifest_file ( zip_file , dir_name , tag_info_list ) return zip_file | Create a stream containing a BagIt zip archive . |
45,151 | def _add_path ( dir_name , payload_info_list ) : for payload_info_dict in payload_info_list : file_name = payload_info_dict [ 'filename' ] or payload_info_dict [ 'pid' ] payload_info_dict [ 'path' ] = d1_common . utils . filesystem . gen_safe_path ( dir_name , 'data' , file_name ) | Add a key with the path to each payload_info_dict . |
45,152 | def _add_payload_files ( zip_file , payload_info_list ) : payload_byte_count = 0 payload_file_count = 0 for payload_info_dict in payload_info_list : zip_file . write_iter ( payload_info_dict [ 'path' ] , payload_info_dict [ 'iter' ] ) payload_byte_count += payload_info_dict [ 'iter' ] . size payload_file_count += 1 return payload_byte_count , payload_file_count | Add the payload files to the zip . |
45,153 | def _add_tag_files ( zip_file , dir_name , payload_info_list , payload_byte_count , payload_file_count ) : tag_info_list = [ ] _add_tag_file ( zip_file , dir_name , tag_info_list , _gen_bagit_text_file_tup ( ) ) _add_tag_file ( zip_file , dir_name , tag_info_list , _gen_bag_info_file_tup ( payload_byte_count , payload_file_count ) , ) _add_tag_file ( zip_file , dir_name , tag_info_list , _gen_pid_mapping_file_tup ( payload_info_list ) ) return tag_info_list | Generate the tag files and add them to the zip . |
45,154 | def _add_manifest_files ( zip_file , dir_name , payload_info_list , tag_info_list ) : for checksum_algorithm in _get_checksum_algorithm_set ( payload_info_list ) : _add_tag_file ( zip_file , dir_name , tag_info_list , _gen_manifest_file_tup ( payload_info_list , checksum_algorithm ) , ) | Generate the manifest files and add them to the zip . |
45,155 | def _add_tag_manifest_file ( zip_file , dir_name , tag_info_list ) : _add_tag_file ( zip_file , dir_name , tag_info_list , _gen_tag_manifest_file_tup ( tag_info_list ) ) | Generate the tag manifest file and add it to the zip . |
45,156 | def _add_tag_file ( zip_file , dir_name , tag_info_list , tag_tup ) : tag_name , tag_str = tag_tup tag_path = d1_common . utils . filesystem . gen_safe_path ( dir_name , tag_name ) tag_iter = _create_and_add_tag_iter ( zip_file , tag_path , tag_str ) tag_info_list . append ( { 'path' : tag_path , 'checksum' : d1_common . checksum . calculate_checksum_on_iterator ( tag_iter , TAG_CHECKSUM_ALGO ) , } ) | Add a tag file to zip_file and record info for the tag manifest file . |
45,157 | def filter_queryset ( self , request , queryset , view ) : user = request . user app_label = queryset . model . _meta . app_label model_name = queryset . model . _meta . model_name kwargs = { } if model_name == 'storage' : model_name = 'data' kwargs [ 'perms_filter' ] = 'data__pk__in' if model_name == 'relation' : model_name = 'collection' kwargs [ 'perms_filter' ] = 'collection__pk__in' permission = '{}.view_{}' . format ( app_label , model_name ) return get_objects_for_user ( user , permission , queryset , ** kwargs ) | Filter permissions queryset . |
45,158 | def has_object_permission ( self , request , view , obj ) : if request . user . is_superuser : return True if 'permissions' in view . action : self . perms_map [ 'POST' ] = [ '%(app_label)s.share_%(model_name)s' ] if view . action in [ 'add_data' , 'remove_data' ] : self . perms_map [ 'POST' ] = [ '%(app_label)s.add_%(model_name)s' ] if hasattr ( view , 'get_queryset' ) : queryset = view . get_queryset ( ) else : queryset = getattr ( view , 'queryset' , None ) assert queryset is not None , ( 'Cannot apply DjangoObjectPermissions on a view that ' 'does not set `.queryset` or have a `.get_queryset()` method.' ) model_cls = queryset . model user = request . user perms = self . get_required_object_permissions ( request . method , model_cls ) if not user . has_perms ( perms , obj ) and not AnonymousUser ( ) . has_perms ( perms , obj ) : if request . method in permissions . SAFE_METHODS : raise Http404 read_perms = self . get_required_object_permissions ( 'GET' , model_cls ) if not user . has_perms ( read_perms , obj ) : raise Http404 return False return True | Check object permissions . |
45,159 | def get_active_subject_set ( self , request ) : if django . conf . settings . DEBUG_GMN : if 'HTTP_VENDOR_INCLUDE_CERTIFICATE' in request . META : request . META [ 'SSL_CLIENT_CERT' ] = self . pem_in_http_header_to_pem_in_string ( request . META [ 'HTTP_VENDOR_INCLUDE_CERTIFICATE' ] ) cert_primary_str , cert_equivalent_set = d1_gmn . app . middleware . session_cert . get_subjects ( request ) jwt_subject_list = d1_gmn . app . middleware . session_jwt . validate_jwt_and_get_subject_list ( request ) primary_subject_str = cert_primary_str all_subjects_set = ( cert_equivalent_set | { cert_primary_str } | set ( jwt_subject_list ) ) if len ( jwt_subject_list ) == 1 : jwt_primary_str = jwt_subject_list [ 0 ] if jwt_primary_str != cert_primary_str : if cert_primary_str == d1_common . const . SUBJECT_PUBLIC : primary_subject_str = jwt_primary_str else : logging . warning ( 'Both a certificate and a JWT were provided and the primary ' 'subjects differ. Using the certificate for primary subject and ' 'the JWT as equivalent.' ) logging . info ( 'Primary active subject: {}' . format ( primary_subject_str ) ) logging . info ( 'All active subjects: {}' . format ( ', ' . join ( sorted ( all_subjects_set ) ) ) ) if django . conf . settings . DEBUG_GMN : if 'HTTP_VENDOR_INCLUDE_SUBJECTS' in request . META : request . all_subjects_set . update ( request . META [ 'HTTP_VENDOR_INCLUDE_SUBJECTS' ] . split ( '\t' ) ) return primary_subject_str , all_subjects_set | Get a set containing all subjects for which the current connection has been successfully authenticated . |
45,160 | def log_setup ( debug_bool ) : level = logging . DEBUG if debug_bool else logging . INFO logging . config . dictConfig ( { "version" : 1 , "disable_existing_loggers" : False , "formatters" : { "verbose" : { "format" : "%(asctime)s %(levelname)-8s %(name)s %(module)s " "%(process)d %(thread)d %(message)s" , "datefmt" : "%Y-%m-%d %H:%M:%S" , } } , "handlers" : { "console" : { "class" : "logging.StreamHandler" , "formatter" : "verbose" , "level" : level , "stream" : "ext://sys.stdout" , } } , "loggers" : { "" : { "handlers" : [ "console" ] , "level" : level , "class" : "logging.StreamHandler" , } } , } ) | Set up logging . |
45,161 | def connect ( self , dsn ) : self . con = psycopg2 . connect ( dsn ) self . cur = self . con . cursor ( cursor_factory = psycopg2 . extras . DictCursor ) self . con . set_isolation_level ( psycopg2 . extensions . ISOLATION_LEVEL_AUTOCOMMIT ) | Connect to DB . |
45,162 | def order_search ( self , search ) : ordering = self . get_query_param ( 'ordering' , self . ordering ) if not ordering : return search sort_fields = [ ] for raw_ordering in ordering . split ( ',' ) : ordering_field = raw_ordering . lstrip ( '-' ) if ordering_field not in self . ordering_fields : raise ParseError ( 'Ordering by `{}` is not supported.' . format ( ordering_field ) ) ordering_field = self . ordering_map . get ( ordering_field , ordering_field ) direction = '-' if raw_ordering [ 0 ] == '-' else '' sort_fields . append ( '{}{}' . format ( direction , ordering_field ) ) return search . sort ( * sort_fields ) | Order given search by the ordering parameter given in request . |
45,163 | def filter_search ( self , search ) : builder = QueryBuilder ( self . filtering_fields , self . filtering_map , self ) search , unmatched = builder . build ( search , self . get_query_params ( ) ) for argument in self . get_always_allowed_arguments ( ) : unmatched . pop ( argument , None ) if unmatched : msg = 'Unsupported parameter(s): {}. Please use a combination of: {}.' . format ( ', ' . join ( unmatched ) , ', ' . join ( self . filtering_fields ) , ) raise ParseError ( msg ) return search | Filter given search by the filter parameter given in request . |
45,164 | def filter_permissions ( self , search ) : user = self . request . user if user . is_superuser : return search if user . is_anonymous : user = get_anonymous_user ( ) filters = [ Q ( 'match' , users_with_permissions = user . pk ) ] filters . extend ( [ Q ( 'match' , groups_with_permissions = group . pk ) for group in user . groups . all ( ) ] ) filters . append ( Q ( 'match' , public_permission = True ) ) return search . query ( 'bool' , should = filters ) | Filter given query based on permissions of the user in the request . |
45,165 | def paginate_response ( self , queryset , serializers_kwargs = { } ) : page = self . paginate_queryset ( queryset ) if page is not None : serializer = self . get_serializer ( page , many = True , ** serializers_kwargs ) return self . get_paginated_response ( serializer . data ) serializer = self . get_serializer ( queryset , many = True , ** serializers_kwargs ) return Response ( serializer . data ) | Optionally return paginated response . |
45,166 | def search ( self ) : search = self . document_class ( ) . search ( ) search = self . custom_filter ( search ) search = self . filter_search ( search ) search = self . order_search ( search ) search = self . filter_permissions ( search ) if search . count ( ) > ELASTICSEARCH_SIZE : limit = self . paginator . get_limit ( self . request ) if not limit or limit > ELASTICSEARCH_SIZE : raise TooManyResults ( ) search = search . extra ( size = ELASTICSEARCH_SIZE ) return search | Handle the search request . |
45,167 | def list_with_post ( self , request ) : if self . is_search_request ( ) : search = self . search ( ) page = self . paginate_queryset ( search ) if page is None : items = search else : items = page try : primary_keys = [ ] order_map_cases = [ ] for order , item in enumerate ( items ) : pk = item [ self . primary_key_field ] primary_keys . append ( pk ) order_map_cases . append ( When ( pk = pk , then = Value ( order ) ) ) queryset = self . get_queryset ( ) . filter ( pk__in = primary_keys ) . order_by ( Case ( * order_map_cases , output_field = IntegerField ( ) ) . asc ( ) ) except KeyError : raise KeyError ( "Combined viewset requires that your index contains a field with " "the primary key. By default this field is called 'id', but you " "can change it by setting primary_key_field." ) serializer = self . get_serializer ( queryset , many = True ) if page is not None : return self . get_paginated_response ( serializer . data ) return Response ( serializer . data ) else : queryset = self . filter_queryset ( self . get_queryset ( ) ) return self . paginate_response ( queryset ) | Endpoint handler . |
45,168 | def getattr ( self , path , fh ) : self . _raise_error_if_os_special_file ( path ) attribute = self . _get_attributes_through_cache ( path ) return self . _stat_from_attributes ( attribute ) | Called by FUSE when the attributes for a file or directory are required . |
45,169 | def readdir ( self , path , fh ) : log . debug ( 'readdir(): {}' . format ( path ) ) try : dir = self . _directory_cache [ path ] except KeyError : dir = self . _get_directory ( path ) self . _directory_cache [ path ] = dir return dir | Called by FUSE when a directory is opened . |
45,170 | def open ( self , path , flags ) : log . debug ( 'open(): {}' . format ( path ) ) if ( flags & self . _READ_ONLY_ACCESS_MODE ) != os . O_RDONLY : self . _raise_error_permission_denied ( path ) attribute = self . _get_attributes_through_cache ( path ) return attribute . is_dir ( ) | Called by FUSE when a file is opened . |
45,171 | def get_inline_expression ( self , text ) : text = text . strip ( ) if not text . startswith ( self . inline_tags [ 0 ] ) or not text . endswith ( self . inline_tags [ 1 ] ) : return return text [ 2 : - 2 ] | Extract an inline expression from the given text . |
45,172 | def _filter_queryset ( self , queryset ) : entities = self . request . query_params . getlist ( 'entity' ) labels = self . request . query_params . getlist ( 'label' ) positions = self . request . query_params . getlist ( 'position' ) if labels and len ( labels ) != len ( entities ) : raise exceptions . ParseError ( 'If `labels` query parameter is given, also `entities` ' 'must be given and they must be of the same length.' ) if positions and len ( positions ) != len ( entities ) : raise exceptions . ParseError ( 'If `positions` query parameter is given, also `entities` ' 'must be given and they must be of the same length.' ) if entities : for entity , label , position in zip_longest ( entities , labels , positions ) : filter_params = { 'entities__pk' : entity } if label : filter_params [ 'relationpartition__label' ] = label if position : filter_params [ 'relationpartition__position' ] = position queryset = queryset . filter ( ** filter_params ) return queryset | Filter queryset by entity label and position . |
45,173 | def update ( self , request , * args , ** kwargs ) : instance = self . get_object ( ) if ( not request . user . has_perm ( 'edit_collection' , instance . collection ) and not request . user . is_superuser ) : return Response ( status = status . HTTP_401_UNAUTHORIZED ) return super ( ) . update ( request , * args , ** kwargs ) | Update the Relation object . |
45,174 | def _process_permission ( perm ) : codename = perm . permission . codename if not codename . startswith ( 'view' ) and not codename . startswith ( 'owner' ) : return index_builder . build ( perm . content_object ) | Rebuild indexes affected by the given permission . |
45,175 | def parse_response ( response , encoding = 'utf-8' ) : return requests_toolbelt . multipart . decoder . MultipartDecoder . from_response ( response , encoding ) . parts | Parse a multipart Requests . Response into a tuple of BodyPart objects . |
45,176 | def parse_str ( mmp_bytes , content_type , encoding = 'utf-8' ) : return requests_toolbelt . multipart . decoder . MultipartDecoder ( mmp_bytes , content_type , encoding ) . parts | Parse multipart document bytes into a tuple of BodyPart objects . |
45,177 | def normalize ( body_part_tup , ) : return '\n\n' . join ( [ '{}\n\n{}' . format ( str ( p . headers [ b'Content-Disposition' ] , p . encoding ) , p . text ) for p in sorted ( body_part_tup , key = lambda p : p . headers [ b'Content-Disposition' ] ) ] ) | Normalize a tuple of BodyPart objects to a string . |
45,178 | def _generate_readme_text ( self , object_tree_path ) : wdef_folder = self . _object_tree . get_source_tree_folder ( object_tree_path ) res = StringIO ( ) if len ( object_tree_path ) : folder_name = object_tree_path [ - 1 ] else : folder_name = 'root' header = 'ObjectTree Folder "{}"' . format ( folder_name ) res . write ( header + '\n' ) res . write ( '{}\n\n' . format ( '=' * len ( header ) ) ) res . write ( 'The content present in object_tree folders is determined by a list\n' ) res . write ( 'of specific identifiers and by queries applied against the DataONE\n' ) res . write ( 'search index.\n\n' ) res . write ( 'Queries:\n\n' ) if len ( wdef_folder [ 'queries' ] ) : for query in wdef_folder [ 'queries' ] : res . write ( '- {}\n' . format ( query ) ) else : res . write ( 'No queries specified at this level.\n' ) res . write ( '\n\n' ) res . write ( 'Identifiers:\n\n' ) if len ( wdef_folder [ 'identifiers' ] ) : for pid in wdef_folder [ 'identifiers' ] : res . write ( '- {}\n' . format ( pid ) ) else : res . write ( 'No individual identifiers selected at this level.\n' ) res . write ( '\n\n' ) res . write ( 'Sub-folders:\n\n' ) if len ( wdef_folder [ 'collections' ] ) : for f in wdef_folder [ 'collections' ] : res . write ( '- {}\n' . format ( f ) ) else : res . write ( 'No object_tree sub-folders are specified at this level.\n' ) return res . getvalue ( ) . encode ( 'utf-8' ) | Generate a human readable description of the folder in text format . |
45,179 | def _serialize_data ( self , data ) : if self . request and self . request . query_params . get ( 'hydrate_data' , False ) : serializer = DataSerializer ( data , many = True , read_only = True ) serializer . bind ( 'data' , self ) return serializer . data else : return [ d . id for d in data ] | Return serialized data or list of ids depending on hydrate_data query param . |
45,180 | def _filter_queryset ( self , perms , queryset ) : user = self . request . user if self . request else AnonymousUser ( ) return get_objects_for_user ( user , perms , queryset ) | Filter object objects by permissions of user in request . |
45,181 | def get_data ( self , collection ) : data = self . _filter_queryset ( 'view_data' , collection . data . all ( ) ) return self . _serialize_data ( data ) | Return serialized list of data objects on collection that user has view permission on . |
45,182 | def load_engines ( manager , class_name , base_module , engines , class_key = 'ENGINE' , engine_type = 'engine' ) : loaded_engines = { } for module_name_or_dict in engines : if not isinstance ( module_name_or_dict , dict ) : module_name_or_dict = { class_key : module_name_or_dict } try : module_name = module_name_or_dict [ class_key ] engine_settings = module_name_or_dict except KeyError : raise ImproperlyConfigured ( "If {} specification is a dictionary, it must define {}" . format ( engine_type , class_key ) ) try : engine_module = import_module ( module_name ) try : engine = getattr ( engine_module , class_name ) ( manager = manager , settings = engine_settings ) if not isinstance ( engine , BaseEngine ) : raise ImproperlyConfigured ( "{} module {} class {} must extend BaseEngine" . format ( engine_type . capitalize ( ) , module_name , class_name ) ) except AttributeError : raise ImproperlyConfigured ( "{} module {} is missing a {} class" . format ( engine_type . capitalize ( ) , module_name , class_name ) ) if engine . get_name ( ) in loaded_engines : raise ImproperlyConfigured ( "Duplicated {} {}" . format ( engine_type , engine . get_name ( ) ) ) loaded_engines [ engine . get_name ( ) ] = engine except ImportError as ex : engine_dir = os . path . join ( os . path . dirname ( upath ( __file__ ) ) , base_module ) try : builtin_engines = [ name for _ , name , _ in pkgutil . iter_modules ( [ engine_dir ] ) ] except EnvironmentError : builtin_engines = [ ] if module_name not in [ 'resolwe.flow.{}.{}' . format ( base_module , builtin_engine ) for builtin_engine in builtin_engines ] : engine_reprs = map ( repr , sorted ( builtin_engines ) ) error_msg = ( "{} isn't an available dataflow {}.\n" "Try using 'resolwe.flow.{}.XXX', where XXX is one of:\n" " {}\n" "Error was: {}" . format ( module_name , engine_type , base_module , ", " . join ( engine_reprs ) , ex ) ) raise ImproperlyConfigured ( error_msg ) else : raise return loaded_engines | Load engines . |
45,183 | def normalize_in_place ( sysmeta_pyxb , reset_timestamps = False ) : if sysmeta_pyxb . accessPolicy is not None : sysmeta_pyxb . accessPolicy = d1_common . wrap . access_policy . get_normalized_pyxb ( sysmeta_pyxb . accessPolicy ) if getattr ( sysmeta_pyxb , 'mediaType' , False ) : d1_common . xml . sort_value_list_pyxb ( sysmeta_pyxb . mediaType . property_ ) if getattr ( sysmeta_pyxb , 'replicationPolicy' , False ) : d1_common . xml . sort_value_list_pyxb ( sysmeta_pyxb . replicationPolicy . preferredMemberNode ) d1_common . xml . sort_value_list_pyxb ( sysmeta_pyxb . replicationPolicy . blockedMemberNode ) d1_common . xml . sort_elements_by_child_values ( sysmeta_pyxb . replica , [ 'replicaVerified' , 'replicaMemberNode' , 'replicationStatus' ] , ) sysmeta_pyxb . archived = bool ( sysmeta_pyxb . archived ) if reset_timestamps : epoch_dt = datetime . datetime ( 1970 , 1 , 1 , tzinfo = d1_common . date_time . UTC ( ) ) sysmeta_pyxb . dateUploaded = epoch_dt sysmeta_pyxb . dateSysMetadataModified = epoch_dt for replica_pyxb in getattr ( sysmeta_pyxb , 'replica' , [ ] ) : replica_pyxb . replicaVerified = epoch_dt else : sysmeta_pyxb . dateUploaded = d1_common . date_time . round_to_nearest ( sysmeta_pyxb . dateUploaded ) sysmeta_pyxb . dateSysMetadataModified = d1_common . date_time . round_to_nearest ( sysmeta_pyxb . dateSysMetadataModified ) for replica_pyxb in getattr ( sysmeta_pyxb , 'replica' , [ ] ) : replica_pyxb . replicaVerified = d1_common . date_time . round_to_nearest ( replica_pyxb . replicaVerified ) | Normalize SystemMetadata PyXB object in - place . |
45,184 | def are_equivalent_pyxb ( a_pyxb , b_pyxb , ignore_timestamps = False ) : normalize_in_place ( a_pyxb , ignore_timestamps ) normalize_in_place ( b_pyxb , ignore_timestamps ) a_xml = d1_common . xml . serialize_to_xml_str ( a_pyxb ) b_xml = d1_common . xml . serialize_to_xml_str ( b_pyxb ) are_equivalent = d1_common . xml . are_equivalent ( a_xml , b_xml ) if not are_equivalent : logger . debug ( 'XML documents not equivalent:' ) logger . debug ( d1_common . xml . format_diff_xml ( a_xml , b_xml ) ) return are_equivalent | Determine if SystemMetadata PyXB objects are semantically equivalent . |
45,185 | def are_equivalent_xml ( a_xml , b_xml , ignore_timestamps = False ) : return are_equivalent_pyxb ( d1_common . xml . deserialize ( a_xml ) , d1_common . xml . deserialize ( b_xml ) , ignore_timestamps , ) | Determine if two SystemMetadata XML docs are semantically equivalent . |
45,186 | def update_elements ( dst_pyxb , src_pyxb , el_list ) : invalid_element_set = set ( el_list ) - set ( SYSMETA_ROOT_CHILD_LIST ) if invalid_element_set : raise ValueError ( 'Passed one or more invalid elements. invalid="{}"' . format ( ', ' . join ( sorted ( list ( invalid_element_set ) ) ) ) ) for el_str in el_list : setattr ( dst_pyxb , el_str , getattr ( src_pyxb , el_str , None ) ) | Copy elements specified in el_list from src_pyxb to dst_pyxb |
45,187 | def get_full_perm ( perm , obj ) : ctype = ContentType . objects . get_for_model ( obj ) ctype = str ( ctype ) . replace ( ' ' , '' ) return '{}_{}' . format ( perm . lower ( ) , ctype ) | Join action with the content type of obj . |
45,188 | def copy_permissions ( src_obj , dest_obj ) : def _process_permission ( codename , user_or_group , dest_obj , relabel ) : if relabel : codename = change_perm_ctype ( codename , dest_obj ) if codename not in dest_all_perms : return assign_perm ( codename , user_or_group , dest_obj ) src_obj_ctype = ContentType . objects . get_for_model ( src_obj ) dest_obj_ctype = ContentType . objects . get_for_model ( dest_obj ) dest_all_perms = get_all_perms ( dest_obj ) relabel = ( src_obj_ctype != dest_obj_ctype ) for perm in UserObjectPermission . objects . filter ( object_pk = src_obj . pk , content_type = src_obj_ctype ) : _process_permission ( perm . permission . codename , perm . user , dest_obj , relabel ) for perm in GroupObjectPermission . objects . filter ( object_pk = src_obj . pk , content_type = src_obj_ctype ) : _process_permission ( perm . permission . codename , perm . group , dest_obj , relabel ) | Copy permissions from src_obj to dest_obj . |
45,189 | def fetch_user ( query ) : user_filter = { 'pk' : query } if query . isdigit ( ) else { 'username' : query } user_model = get_user_model ( ) try : return user_model . objects . get ( ** user_filter ) except user_model . DoesNotExist : raise exceptions . ParseError ( "Unknown user: {}" . format ( query ) ) | Get user by pk or username . Raise error if it doesn't exist . |
45,190 | def fetch_group ( query ) : group_filter = { 'pk' : query } if query . isdigit ( ) else { 'name' : query } try : return Group . objects . get ( ** group_filter ) except Group . DoesNotExist : raise exceptions . ParseError ( "Unknown group: {}" . format ( query ) ) | Get group by pk or name . Raise error if it doesn't exist . |
45,191 | def check_owner_permission ( payload , allow_user_owner ) : for entity_type in [ 'users' , 'groups' ] : for perm_type in [ 'add' , 'remove' ] : for perms in payload . get ( entity_type , { } ) . get ( perm_type , { } ) . values ( ) : if 'owner' in perms : if entity_type == 'users' and allow_user_owner : continue if entity_type == 'groups' : raise exceptions . ParseError ( "Owner permission cannot be assigned to a group" ) raise exceptions . PermissionDenied ( "Only owners can grant/revoke owner permission" ) | Raise PermissionDenied if owner found in data . |
45,192 | def check_public_permissions ( payload ) : allowed_public_permissions = [ 'view' , 'add' , 'download' ] for perm_type in [ 'add' , 'remove' ] : for perm in payload . get ( 'public' , { } ) . get ( perm_type , [ ] ) : if perm not in allowed_public_permissions : raise exceptions . PermissionDenied ( "Permissions for public users are too open" ) | Raise PermissionDenied if public permissions are too open . |
45,193 | def check_user_permissions ( payload , user_pk ) : for perm_type in [ 'add' , 'remove' ] : user_pks = payload . get ( 'users' , { } ) . get ( perm_type , { } ) . keys ( ) if user_pk in user_pks : raise exceptions . PermissionDenied ( "You cannot change your own permissions" ) | Raise PermissionDenied if payload includes user_pk . |
45,194 | def remove_permission ( payload , permission ) : payload = copy . deepcopy ( payload ) for entity_type in [ 'users' , 'groups' ] : for perm_type in [ 'add' , 'remove' ] : for perms in payload . get ( entity_type , { } ) . get ( perm_type , { } ) . values ( ) : if permission in perms : perms . remove ( permission ) for perm_type in [ 'add' , 'remove' ] : perms = payload . get ( 'public' , { } ) . get ( perm_type , [ ] ) if permission in perms : perms . remove ( permission ) return payload | Remove all occurrences of permission from payload . |
45,195 | def update_permission ( obj , data ) : full_permissions = get_all_perms ( obj ) def apply_perm ( perm_func , perms , entity ) : if perms == 'ALL' : perms = full_permissions for perm in perms : perm_codename = get_full_perm ( perm , obj ) if perm_codename not in full_permissions : raise exceptions . ParseError ( "Unknown permission: {}" . format ( perm ) ) perm_func ( perm_codename , entity , obj ) def set_permissions ( entity_type , perm_type ) : perm_func = assign_perm if perm_type == 'add' else remove_perm fetch_fn = fetch_user if entity_type == 'users' else fetch_group for entity_id in data . get ( entity_type , { } ) . get ( perm_type , [ ] ) : entity = fetch_fn ( entity_id ) if entity : perms = data [ entity_type ] [ perm_type ] [ entity_id ] apply_perm ( perm_func , perms , entity ) def set_public_permissions ( perm_type ) : perm_func = assign_perm if perm_type == 'add' else remove_perm user = AnonymousUser ( ) perms = data . get ( 'public' , { } ) . get ( perm_type , [ ] ) apply_perm ( perm_func , perms , user ) with transaction . atomic ( ) : set_permissions ( 'users' , 'add' ) set_permissions ( 'users' , 'remove' ) set_permissions ( 'groups' , 'add' ) set_permissions ( 'groups' , 'remove' ) set_public_permissions ( 'add' ) set_public_permissions ( 'remove' ) | Update object permissions . |
45,196 | def assign_contributor_permissions ( obj , contributor = None ) : for permission in get_all_perms ( obj ) : assign_perm ( permission , contributor if contributor else obj . contributor , obj ) | Assign all permissions to object's contributor . |
45,197 | async def _make_connection ( self ) : return await aioredis . create_redis ( 'redis://{}:{}' . format ( self . _redis_params . get ( 'host' , 'localhost' ) , self . _redis_params . get ( 'port' , 6379 ) ) , db = int ( self . _redis_params . get ( 'db' , 1 ) ) ) | Construct a connection to Redis . |
45,198 | async def _call_redis ( self , meth , * args , ** kwargs ) : while True : try : if not self . _redis : self . _redis = await self . _make_connection ( ) return await meth ( self . _redis , * args , ** kwargs ) except aioredis . RedisError : logger . exception ( "Redis connection error" ) if self . _redis : self . _redis . close ( ) await self . _redis . wait_closed ( ) self . _redis = None await asyncio . sleep ( 3 ) | Perform a Redis call and handle connection dropping . |
45,199 | async def clear_queue ( self ) : conn = await self . _make_connection ( ) try : script = await conn . eval ( script , keys = [ ] , args = [ '*{}*' . format ( settings . FLOW_MANAGER [ 'REDIS_PREFIX' ] ) ] , ) finally : conn . close ( ) | Reset the executor queue channel to an empty state . |
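The `question` cells above are whitespace-tokenized Python: every bracket, comma, and operator is padded with single spaces, and the original line breaks and indentation are gone. As a minimal sketch of how a row could be made readable again, the hypothetical `detokenize` helper below re-attaches punctuation with two regex passes. It is my own illustration, not tooling shipped with the dataset, and it deliberately leaves some spacing (for example, before call parentheses) untouched rather than risk mangling keyword constructs.

```python
import re

def detokenize(tokens: str) -> str:
    """Heuristically re-join a space-separated `question` cell.

    Good enough for reading; it does not restore line breaks,
    indentation, or the spacing before call parentheses.
    """
    code = tokens
    # Drop spaces before closing brackets and trailing punctuation.
    code = re.sub(r'\s+([)\]}.,:])', r'\1', code)
    # Drop spaces after opening brackets and attribute dots.
    code = re.sub(r'([(\[{.])\s+', r'\1', code)
    return code

row = "def get_lookup ( self , operator ) : return self . _lookups [ operator ]"
print(detokenize(row))
# -> def get_lookup (self, operator): return self._lookups [operator]
```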