idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
44,900
def get_user_token():
    """Return the authenticated user's auth token, or '' when unavailable."""
    top = stack.top
    if not hasattr(top, 'current_user'):
        return ''
    return top.current_user.get('token', '')
Return the authenticated user's auth token.
44,901
def get_token_issuer():
    """Return the issuer ('iss') claim of the current user's token.

    Falls back to the configured JWT issuer when there is no request-bound
    user or the claim is missing.
    """
    try:
        claims = stack.top.current_user
        return claims.get('iss', get_config().jwt_issuer)
    except Exception:
        # No user attached to the request context; fall through to default.
        pass
    return get_config().jwt_issuer
Return the issuer that created this user's token.
44,902
def _search(self, limit, format):
    """Query the MsCognitive news endpoint once and return NewsResult objects.

    Advances ``self.current_offset`` so the next call fetches the next page.
    """
    capped = min(limit, self.MAX_SEARCH_PER_QUERY)
    params = {
        'q': self.query,
        'count': capped,
        'offset': self.current_offset,
    }
    params.update(self.CUSTOM_PARAMS)
    headers = {'Ocp-Apim-Subscription-Key': self.api_key}
    if not self.silent_fail:
        QueryChecker.check_web_params(params, headers)
    response = requests.get(self.QUERY_URL, params=params, headers=headers)
    json_results = self.get_json_results(response)
    packaged = [NewsResult(item) for item in json_results["value"]]
    # The API never returns more than 50 items per call.
    self.current_offset += min(50, capped, len(packaged))
    return packaged
Return a list of result objects for the next page of the MsCognitive search.
44,903
def create(self, **kwargs):
    """Create a notification by POSTing kwargs as the JSON body."""
    return self.client.create(url=self.base_url, json=kwargs)
Create a notification .
44,904
def get(self, **kwargs):
    """Fetch details for the notification identified by ``notification_id``."""
    target = '%s/%s' % (self.base_url, kwargs['notification_id'])
    return self.client.list(path=target)
Get the details for a specific notification .
44,905
def delete(self, **kwargs):
    """Delete the notification identified by ``notification_id``."""
    target = self.base_url + '/%s' % kwargs['notification_id']
    return self.client.delete(url=target)
Delete a notification .
44,906
def update(self, **kwargs):
    """PUT the remaining kwargs to the alarm definition ``alarm_id``."""
    # pop() both builds the URL and strips the id from the payload.
    target = self.base_url + '/%s' % kwargs.pop('alarm_id')
    return self.client.create(url=target, method='PUT', json=kwargs)
Update a specific alarm definition .
44,907
def compute_err(self, solution_y, coefficients):
    """Return the sum of absolute differences between modeled and expected y.

    Compares ``solution_y`` element-wise against ``self.expected_values``.
    If any coefficient is negative the error is penalized by a factor of 1.5.

    :param solution_y: modeled y-values.
    :param coefficients: model coefficients; negatives trigger the penalty.
    :return: the (possibly penalized) total absolute error.
    """
    error = sum(
        abs(modeled - expected)
        for modeled, expected in zip(solution_y, self.expected_values)
    )
    # Generator expressions avoid building throwaway lists (original used
    # any([...]) with a list comprehension and a manual accumulation loop).
    if any(c < 0 for c in coefficients):
        error *= 1.5
    return error
Return an error value by finding the absolute difference for each element in a list of solution - generated y - values versus expected values .
44,908
def read_config(ip, mac):
    """Fetch and print the current configuration of a myStrom device."""
    click.echo("Read configuration from %s" % ip)
    response = requests.get(
        'http://{}/{}/{}/'.format(ip, URI, mac), timeout=TIMEOUT)
    print(response.json())
Read the current configuration of a myStrom device .
44,909
def write_config(ip, mac, single, double, long, touch):
    """Write button-action URLs to a myStrom button."""
    click.echo("Write configuration to device %s" % ip)
    payload = {
        'single': single,
        'double': double,
        'long': long,
        'touch': touch,
    }
    response = requests.post(
        'http://{}/{}/{}/'.format(ip, URI, mac), data=payload, timeout=TIMEOUT)
    if response.status_code == 200:
        click.echo("Configuration of %s set" % mac)
Write the current configuration of a myStrom button .
44,910
def write_ha_config(ip, mac, hass, port, id):
    """Configure a myStrom button to push its events to Home Assistant."""
    click.echo("Write configuration for Home Assistant to device %s..." % ip)
    action = "get://{1}:{2}/api/mystrom?{0}={3}"
    payload = {
        pattern: action.format(pattern, hass, port, id)
        for pattern in ('single', 'double', 'long', 'touch')
    }
    response = requests.post(
        'http://{}/{}/{}/'.format(ip, URI, mac), data=payload, timeout=TIMEOUT)
    if response.status_code == 200:
        click.echo("Configuration for %s set" % ip)
        click.echo("After using the push pattern the first time then "
                   "the myStrom WiFi Button will show up as %s" % id)
Write the configuration for Home Assistant to a myStrom button .
44,911
def reset_config(ip, mac):
    """Clear every configured action on a myStrom WiFi Button."""
    click.echo("Reset configuration of button %s..." % ip)
    payload = {pattern: "" for pattern in ('single', 'double', 'long', 'touch')}
    response = requests.post(
        'http://{}/{}/{}/'.format(ip, URI, mac), data=payload, timeout=TIMEOUT)
    if response.status_code == 200:
        click.echo("Reset configuration of %s" % mac)
Reset the current configuration of a myStrom WiFi Button .
44,912
def color(ip, mac, hue, saturation, value):
    """Switch the bulb on with the given HSV color."""
    MyStromBulb(ip, mac).set_color_hsv(hue, saturation, value)
Switch the bulb on with the given color .
44,913
def to_epoch(t):
    """Convert a datetime (ISO string or datetime object) to epoch seconds.

    Naive datetimes and strings without an explicit offset are treated as UTC.
    """
    utc = pytz.timezone('utc')
    if isinstance(t, str):
        if '+' not in t:
            t += '+00:00'
        t = parser.parse(t)
    elif t.tzinfo is None or t.tzinfo.utcoffset(t) is None:
        t = t.replace(tzinfo=utc)
    epoch_start = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, utc)
    return int((t - epoch_start).total_seconds())
Take a datetime either as a string or a datetime . datetime object and return the corresponding epoch
44,914
def get_container_version():
    """Return the contents of a VERSION file next to the running script.

    Returns '' when no VERSION file exists (i.e. not running in a container).
    """
    script_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
    version_path = os.path.join(script_dir, 'VERSION')
    if not os.path.exists(version_path):
        return ''
    with open(version_path) as fh:
        return fh.read()
Return the version of the docker container running the present server, or an empty string if not running in a container.
44,915
def run_all(plot=True, seed=None):
    """Run every example script, optionally seeding the RNG first."""
    if seed is not None:
        import random
        random.seed(seed)
    examples = (
        ("biggest_multiple.py", lambda: biggest_multiple.run(plot=plot)),
        ("polynomials.py", lambda: polynomials.run(plot=plot)),
        ("travelling_salesman.py", lambda: travelling_salesman.run(plot=plot)),
        ("irrigation.py", lambda: irrigation.run()),
    )
    for filename, runner in examples:
        print("Running %s" % filename)
        runner()
Run all examples .
44,916
def copy(self):
    """Return a fresh gene of the same concrete type carrying identical DNA."""
    cls = type(self)
    return cls(self.dna, suppressed=self.suppressed, name=self.name)
Return a new instance of this gene with the same DNA .
44,917
def _check_dna ( self , dna ) : valid_chars = set ( self . GENETIC_MATERIAL_OPTIONS ) assert all ( char in valid_chars for char in dna )
Check that a DNA string only contains characters in GENETIC_MATERIAL_OPTIONS .
44,918
def mutate(self, p_mutate):
    """Flip each DNA bit ('0' <-> '1') independently with probability p_mutate."""
    flipped = {'0': '1', '1': '0'}
    self.dna = ''.join(
        flipped[bit] if random.random() < p_mutate else bit
        for bit in self.dna
    )
Check each element for mutation swapping 0 for 1 and vice - versa .
44,919
def deep_merge(base, extra):
    """Recursively merge ``extra`` into ``base`` in place.

    A value of None in ``extra`` deletes the key from ``base``; nested dicts
    are merged recursively; any other value overwrites the existing one.
    """
    if extra is None:
        return
    for key, value in extra.items():
        if value is None:
            base.pop(key, None)
        elif isinstance(base.get(key), dict) and isinstance(value, dict):
            deep_merge(base[key], value)
        else:
            base[key] = value
Deeply merge two dictionaries overriding existing keys in the base .
44,920
def make_client(api_version, session=None, endpoint=None, service_type='monitoring'):
    """Return a monitoring API client for the requested API version."""
    client_cls = utils.get_client_class('monitoring', api_version, VERSION_MAP)
    return client_cls(
        session=session,
        service_type=service_type,
        endpoint=endpoint,
        app_name='monascaclient',
        app_version=version.version_string,
    )
Returns an monitoring API client .
44,921
def create_command_class(name, func_module):
    """Build a MigratingCommand subclass from a ``do_*`` style function.

    Returns (cmd-name, class) where the command name drops the 3-char prefix
    and uses dashes instead of underscores.
    """
    cmd_name = name[3:].replace('_', '-')
    callback = getattr(func_module, name)
    desc = callback.__doc__ or ''
    body = {
        '_args': getattr(callback, 'arguments', []),
        '_callback': staticmethod(callback),
        '_description': desc,
        '_epilog': desc,
        '_help': desc.strip().split('\n')[0],
    }
    class_name = '%sCommand' % cmd_name.title().replace('-', '')
    return cmd_name, type(class_name, (MigratingCommand,), body)
Dynamically creates subclass of MigratingCommand .
44,922
def _session(kwargs):
    """Return the session passed in kwargs, or build a new one.

    Raises RuntimeError when a provided session is not a keystone Session.
    """
    if 'session' not in kwargs:
        LOG.debug('Initializing new session')
        auth = _get_auth_handler(kwargs)
        return _get_session(auth, kwargs)
    LOG.debug('Reusing session')
    sess = kwargs.get('session')
    if not isinstance(sess, k_session.Session):
        msg = 'session should be an instance of %s' % k_session.Session
        LOG.error(msg)
        raise RuntimeError(msg)
    return sess
Returns or reuses session .
44,923
def load(cls, path):
    """Load an instance of the class from ``path``; None yields an empty dict."""
    contents = {} if path is None else io.load(path)
    return cls.load_from_dict(contents)
Loads an instance of the class from a file .
44,924
def register(cls, name):
    """Return a decorator that registers a subclass under ``name``.

    The decorated class gains a read-only ``name`` property and is stored in
    ``cls.REGISTRY``.
    """
    def register_decorator(subclass):
        subclass.name = property(lambda self: name)
        assert issubclass(subclass, cls), \
            "Must be subclass matching your NamedRegistry class"
        cls.REGISTRY[name] = subclass
        return subclass
    return register_decorator
Decorator to register a class .
44,925
def construct(cls, name, *args, **kwargs):
    """Instantiate the registered class stored under ``name``."""
    registered_cls = cls.REGISTRY[name]
    return registered_cls(*args, **kwargs)
Constructs an instance of an object given its name .
44,926
def resize_by_factor(im, factor):
    """Resize ``im`` by ``factor`` via Gaussian-prefiltered pyramid resampling.

    factor < 1 downscales, factor > 1 upscales, factor == 1 returns the input.
    """
    _import_skimage()
    from skimage.transform.pyramids import pyramid_expand, pyramid_reduce
    if factor == 1:
        return im
    if factor < 1:
        return pyramid_reduce(im, downscale=1 / factor)
    return pyramid_expand(im, upscale=factor)
Resizes the image according to a factor . The image is pre - filtered with a Gaussian and then resampled with bilinear interpolation .
44,927
def asgray(im):
    """Return the grayscale version of ``im`` by averaging the RGB channels.

    A 2-D image is returned unchanged; an alpha channel (4th channel) is
    ignored; anything else raises ValueError.
    """
    if im.ndim == 2:
        return im
    if im.ndim == 3 and im.shape[2] in (3, 4):
        return im[..., :3].mean(axis=-1)
    raise ValueError('Invalid image format')
Takes an image and returns its grayscale version by averaging the color channels . if an alpha channel is present it will simply be ignored . If a grayscale image is given the original image is returned .
44,928
def load(path, dtype=np.float64):
    """Load an image from file.

    uint8 requests return the raw pixels; float dtypes are rescaled to [0, 1].
    """
    _import_skimage()
    import skimage.io
    raw = skimage.io.imread(path)
    if dtype == np.uint8:
        return raw
    if dtype in {np.float16, np.float32, np.float64}:
        return raw.astype(dtype) / 255
    raise ValueError('Unsupported dtype')
Loads an image from file .
44,929
def save(path, im):
    """Save an image; float arrays are assumed in [0, 1] and scaled to uint8."""
    from PIL import Image
    if im.dtype != np.uint8:
        im = (im * 255).astype(np.uint8)
    Image.fromarray(im).save(path)
Saves an image to file .
44,930
def integrate(ii, r0, c0, r1, c1):
    """Sum over the inclusive window [r0..r1, c0..c1] of integral image ``ii``.

    ``ii`` carries a trailing channel axis; one sum per channel is returned.
    """
    total = np.zeros(ii.shape[-1])
    total += ii[r1, c1]
    if r0 > 0 and c0 > 0:
        total += ii[r0 - 1, c0 - 1]
    if r0 > 0:
        total -= ii[r0 - 1, c1]
    if c0 > 0:
        total -= ii[r1, c0 - 1]
    return total
Use an integral image to integrate over a given window .
44,931
def offset(img, offset, fill_value=0):
    """Shift image contents by ``offset`` rows/cols without changing its size.

    Vacated cells are filled with ``fill_value``.
    """
    sh = img.shape
    if sh == (0, 0):
        return img
    dy, dx = offset
    shifted = np.empty(sh)
    shifted[:] = fill_value
    dst = (slice(max(dy, 0), min(sh[0] + dy, sh[0])),
           slice(max(dx, 0), min(sh[1] + dx, sh[1])))
    src = (slice(max(-dy, 0), min(sh[0] - dy, sh[0])),
           slice(max(-dx, 0), min(sh[1] - dx, sh[1])))
    shifted[dst] = img[src]
    return shifted
Moves the contents of image without changing the image size . The missing values are given a specified fill value .
44,932
def bounding_box(alpha, threshold=0.1):
    """Return (top, left, bottom, right) of the support where alpha > threshold."""
    assert alpha.ndim == 2
    bounds = []
    for axis in range(2):
        support = alpha.max(axis=1 - axis)
        nonzero = np.where(support > threshold)[0]
        bounds.append((nonzero[0], nonzero[-1]))
    return (bounds[0][0], bounds[1][0], bounds[0][1], bounds[1][1])
Returns a bounding box of the support .
44,933
def bounding_box_as_binary_map(alpha, threshold=0.1):
    """Return a boolean map of alpha's shape marking its bounding box.

    NOTE(review): ``threshold`` is accepted but not forwarded to
    ``bounding_box`` (which uses its own default) — mirrors original behavior.
    """
    top, left, bottom, right = bounding_box(alpha)
    mask = np.zeros(alpha.shape, dtype=np.bool_)
    mask[top:bottom, left:right] = 1
    return mask
Similar to bounding_box except returns the bounding box as a binary map the same size as the input .
44,934
def extract_patches(images, patch_shape, samples_per_image=40, seed=0, cycle=True):
    """Yield randomly chosen patches of ``patch_shape`` from ``images``.

    :param images: iterable of 2-D arrays.
    :param patch_shape: (height, width) of each patch; must be strictly
        smaller than every image.
    :param samples_per_image: patches drawn per image per pass.
    :param seed: seed for the patch-position RNG (deterministic output).
    :param cycle: when True (default), loop over the images forever; when
        False, stop after a single pass.  The original implementation ignored
        this parameter and always cycled.
    """
    rs = np.random.RandomState(seed)
    image_iter = itr.cycle(images) if cycle else iter(images)
    for Xi in image_iter:
        w, h = [Xi.shape[i] - patch_shape[i] for i in range(2)]
        assert w > 0 and h > 0
        # All valid top-left corners, shuffled, then truncated.
        indices = np.asarray(list(itr.product(range(w), range(h))))
        rs.shuffle(indices)
        for x, y in indices[:samples_per_image]:
            yield Xi[x:x + patch_shape[0], y:y + patch_shape[1]]
Takes a set of images and yields randomly chosen patches of specified size .
44,935
def bytesize(arr):
    """Return the memory footprint of ``arr``'s data buffer in bytes."""
    element_count = np.prod(arr.shape)
    return element_count * np.dtype(arr.dtype).itemsize
Returns the memory byte size of a Numpy array as an integer .
44,936
def apply_once(func, arr, axes, keepdims=True):
    """Apply ``func`` once over a flattened view of the given axes.

    Unlike ``numpy.apply_over_axes``, the listed axes are collapsed into a
    single axis first, so ``func`` is called once per remaining position —
    this matters for non-linear functions.

    :param func: 1-D function applied along the collapsed axis.
    :param arr: input array.
    :param axes: int or iterable of ints (negative values allowed).
    :param keepdims: keep the reduced axes as size-1 dimensions.
    """
    all_axes = np.arange(arr.ndim)
    if isinstance(axes, int):
        axes = {axes}
    else:
        # Normalize negative axes.
        axes = set(axis % arr.ndim for axis in axes)
    # The collapsed axes are swapped to be contiguous starting at the
    # smallest requested axis.
    principal_axis = min(axes)
    for i, axis in enumerate(axes):
        axis0 = principal_axis + i
        if axis != axis0:
            all_axes[axis0], all_axes[axis] = all_axes[axis], all_axes[axis0]
    transposed_arr = arr.transpose(all_axes)
    # new_shape merges the requested axes into one (-1); new_shape_keepdims
    # records the size-1 placeholder shape for the keepdims result.
    new_shape = []
    new_shape_keepdims = []
    for axis, dim in enumerate(arr.shape):
        if axis == principal_axis:
            new_shape.append(-1)
        elif axis not in axes:
            new_shape.append(dim)
        if axis in axes:
            new_shape_keepdims.append(1)
        else:
            new_shape_keepdims.append(dim)
    collapsed = np.apply_along_axis(func, principal_axis,
                                    transposed_arr.reshape(new_shape))
    if keepdims:
        return collapsed.reshape(new_shape_keepdims)
    else:
        return collapsed
Similar to numpy . apply_over_axes except this performs the operation over a flattened version of all the axes meaning that the function will only be called once . This only makes a difference for non - linear functions .
44,937
def tupled_argmax(a):
    """Return the position of the maximum as an index tuple.

    ``numpy.argmax`` alone returns a flattened scalar index.
    """
    flat_index = np.argmax(a)
    return np.unravel_index(flat_index, np.shape(a))
Argmax that returns an index tuple . Note that numpy . argmax will return a scalar index as if you had flattened the array .
44,938
def pad(data, padwidth, value=0.0):
    """Pad an array with a constant ``value`` on all sides.

    :param data: array-like input.
    :param padwidth: int (same width on every axis) or per-axis tuple.
    :param value: fill value for the border.
    :return: new array of the padded shape, same dtype as ``data``.
    """
    data = np.asarray(data)
    shape = data.shape
    if isinstance(padwidth, int):
        padwidth = (padwidth,) * len(shape)
    padded_shape = tuple(
        map(lambda ix: ix[1] + padwidth[ix[0]] * 2, enumerate(shape)))
    new_data = np.empty(padded_shape, dtype=data.dtype)
    new_data[..., :] = value
    # Multidimensional indexing with a *list* of slices (as the original did)
    # was removed from NumPy; a tuple is required.
    new_data[tuple(
        slice(w, -w) if w > 0 else slice(None) for w in padwidth)] = data
    return new_data
Pad an array with a specific value .
44,939
def pad_to_size(data, shape, value=0.0):
    """Center ``data`` inside a new array of exactly ``shape``.

    A dimension given as -1 keeps the corresponding size of ``data``.

    :param data: input array.
    :param shape: target shape (per-axis sizes, -1 to keep).
    :param value: fill value for the border.
    """
    shape = [data.shape[i] if shape[i] == -1 else shape[i]
             for i in range(len(shape))]
    new_data = np.empty(shape)
    new_data[:] = value
    II = [slice((shape[i] - data.shape[i]) // 2,
                (shape[i] - data.shape[i]) // 2 + data.shape[i])
          for i in range(len(shape))]
    # Indexing must use a tuple of slices; a list (as in the original) is no
    # longer accepted by NumPy.
    new_data[tuple(II)] = data
    return new_data
This is similar to pad except you specify the final shape of the array .
44,940
def pad_repeat_border(data, padwidth):
    """Pad an array by repeating its border values outward.

    :param data: array-like input.
    :param padwidth: int (same width on every axis) or per-axis tuple.
    """
    data = np.asarray(data)
    shape = data.shape
    if isinstance(padwidth, int):
        padwidth = (padwidth,) * len(shape)
    padded_shape = tuple(
        map(lambda ix: ix[1] + padwidth[ix[0]] * 2, enumerate(shape)))
    new_data = np.empty(padded_shape, dtype=data.dtype)
    # Tuple indexing required: list-of-slices indexing was removed from NumPy.
    new_data[tuple(
        slice(w, -w) if w > 0 else slice(None) for w in padwidth)] = data
    # Broadcast the edge value of each axis into its border band.
    for i, pw in enumerate(padwidth):
        if pw > 0:
            selection = [slice(None)] * data.ndim
            selection2 = [slice(None)] * data.ndim
            selection[i] = slice(0, pw)
            selection2[i] = slice(pw, pw + 1)
            new_data[tuple(selection)] = new_data[tuple(selection2)]
            selection[i] = slice(-pw, None)
            selection2[i] = slice(-pw - 1, -pw)
            new_data[tuple(selection)] = new_data[tuple(selection2)]
    return new_data
Similar to pad except the border value from data is used to pad .
44,941
def pad_repeat_border_corner(data, shape):
    """Grow ``data`` to ``shape`` by repeating border values on the upper end.

    Unlike ``pad_repeat_border``, padding is applied only at the high end of
    each axis and the final shape is given explicitly.
    """
    new_data = np.empty(shape)
    # Tuple indexing required: list-of-slices indexing was removed from NumPy.
    new_data[tuple(slice(upper) for upper in data.shape)] = data
    for i in range(len(shape)):
        selection = tuple([slice(None)] * i + [slice(data.shape[i], None)])
        selection2 = tuple([slice(None)] * i + [slice(data.shape[i] - 1, data.shape[i])])
        new_data[selection] = new_data[selection2]
    return new_data
Similar to pad_repeat_border except the padding is always done on the upper end of each axis and the target size is specified .
44,942
def _dict_native_ok(d):
    """Return True if ``d`` can be stored natively as HDF5 groups.

    Requires fewer than 256 entries and string-only keys.
    """
    if len(d) >= 256:
        return False
    return all(isinstance(k, six.string_types) for k in d)
This checks if a dictionary can be saved natively as HDF5 groups .
44,943
def _load_level(handler, level, pathtable):
    """Load ``level`` from the HDF5 file, memoizing results by node path.

    SoftLinks are dereferenced but cached under their *target* path, so a
    linked node and its target resolve to the same Python object.
    """
    if isinstance(level, tables.link.SoftLink):
        # Dereference the link; key the cache on the link target.
        pathname = level.target
        node = level()
    else:
        pathname = level._v_pathname
        node = level
    try:
        return pathtable[pathname]
    except KeyError:
        # First visit: materialize the node and cache it for future links.
        pathtable[pathname] = _load_nonlink_level(handler, node, pathtable,
                                                  pathname)
        return pathtable[pathname]
Loads level and builds appropriate type handling softlinks if necessary
44,944
def load(path, group=None, sel=None, unpack=False):
    """Load data from an HDF5 file previously written with ``save``.

    :param path: file to read.
    :param group: a group path (str) to load, or an iterable of group paths
        (returned as a tuple); None loads the whole file.
    :param sel: optional selection/slice, only valid when a single group is
        (explicitly or automatically) targeted.
    :param unpack: if the root holds a single child, return that child's
        value directly instead of a one-entry dict.
    """
    with tables.open_file(path, mode='r') as h5file:
        # Shared memo table so soft-linked nodes load once.
        pathtable = {}
        if group is not None:
            if isinstance(group, str):
                data = _load_specific_level(h5file, h5file, group, sel=sel,
                                            pathtable=pathtable)
            else:
                # Multiple groups requested: load each, return as a tuple.
                data = []
                for g in group:
                    data_i = _load_specific_level(h5file, h5file, g, sel=sel,
                                                  pathtable=pathtable)
                    data.append(data_i)
                data = tuple(data)
        else:
            grp = h5file.root
            # Files saved with unpack=True record it in an attribute.
            auto_unpack = (DEEPDISH_IO_UNPACK in grp._v_attrs and
                           grp._v_attrs[DEEPDISH_IO_UNPACK])
            do_unpack = unpack or auto_unpack
            if do_unpack and len(grp._v_children) == 1:
                # Single child: load it directly and skip the dict wrapper.
                name = next(iter(grp._v_children))
                data = _load_specific_level(h5file, grp, name, sel=sel,
                                            pathtable=pathtable)
                do_unpack = False
            elif sel is not None:
                raise ValueError("Must specify group with `sel` unless it "
                                 "automatically unpacks")
            else:
                data = _load_level(h5file, grp, pathtable)
            if DEEPDISH_IO_VERSION_STR in grp._v_attrs:
                v = grp._v_attrs[DEEPDISH_IO_VERSION_STR]
            else:
                v = 0
            if v > IO_VERSION:
                warnings.warn('This file was saved with a newer version of '
                              'deepdish. Please upgrade to make sure it loads '
                              'correctly.')
            # Late unpack for data loaded via _load_level.
            if do_unpack and isinstance(data, dict) and len(data) == 1:
                data = next(iter(data.values()))
        return data
Loads an HDF5 saved with save .
44,945
def sorted_maybe_numeric(x):
    """Sort numerically when every item is a digit string, else lexically."""
    if all(map(str.isdigit, x)):
        return sorted(x, key=int)
    return sorted(x)
Sorts x with numeric semantics if all keys are nonnegative integers . Otherwise uses standard string sorting .
44,946
def abbreviate(s, maxlength=25):
    """Abbreviate ``s`` to at most ``maxlength`` *visible* characters.

    ANSI escape sequences (``\\033[...m``) are not counted toward the length.
    When truncation occurs, the output ends with a color reset followed by
    '...'. Strings that fit are returned unchanged.
    """
    assert maxlength >= 4
    skip = False      # inside an ANSI escape sequence
    abbrv = None      # candidate abbreviation, set when near the limit
    i = 0             # count of visible (non-escape) characters so far
    for j, c in enumerate(s):
        if c == '\033':
            skip = True
        elif skip:
            # 'm' terminates an ANSI SGR escape sequence.
            if c == 'm':
                skip = False
        else:
            i += 1
        if i == maxlength - 1:
            # Remember the cut point, leaving room for the reset + ellipsis.
            abbrv = s[:j] + '\033[0m...'
        elif i > maxlength:
            break
    if i <= maxlength:
        return s
    else:
        return abbrv
Color - aware abbreviator
44,947
def extend_settings(self, data_id, files, secrets):
    """Extend the settings the manager will serialize for the executor.

    Serializes the Data object, its location and process into ``files`` and
    merges the data's resolved secrets into ``secrets`` (both mutated in
    place).
    """
    data = Data.objects.select_related('process').get(pk=data_id)
    files[ExecutorFiles.DJANGO_SETTINGS].update({
        'USE_TZ': settings.USE_TZ,
        'FLOW_EXECUTOR_TOOLS_PATHS': self.get_tools_paths(),
    })
    files[ExecutorFiles.DATA] = model_to_dict(data)
    files[ExecutorFiles.DATA_LOCATION] = model_to_dict(data.location)
    files[ExecutorFiles.PROCESS] = model_to_dict(data.process)
    # Resource limits are computed, not stored on the model directly.
    files[ExecutorFiles.PROCESS]['resource_limits'] = \
        data.process.get_resource_limits()
    secrets.update(data.resolve_secrets())
Extend the settings the manager will serialize .
44,948
def get_tools_paths(self):
    """Return tool directories.

    Per-app paths are used while debugging/testing; in production the
    immediate subdirectories of FLOW_TOOLS_ROOT are returned.
    """
    if settings.DEBUG or is_testing():
        return list(get_apps_tools().values())
    tools_root = settings.FLOW_TOOLS_ROOT
    subdirs = next(os.walk(tools_root))[1]
    return [os.path.join(tools_root, name) for name in subdirs]
Get tools paths .
44,949
def dump(self):
    """Log the normalized permission list at DEBUG level, one line per row.

    Note: the original used ``map(logging.debug, ...)``; in Python 3 ``map``
    is lazy, so none of the debug calls were ever executed. A plain loop is
    required.
    """
    logging.debug('AccessPolicy:')
    for line in pprint.pformat(self.get_normalized_perm_list()).splitlines():
        logging.debug('  {}'.format(line))
Dump the current state to debug level log .
44,950
def add_authenticated_read(self):
    """Grant read to all authenticated subjects, replacing the public grant."""
    read = 'read'
    self.remove_perm(d1_common.const.SUBJECT_PUBLIC, read)
    self.add_perm(d1_common.const.SUBJECT_AUTHENTICATED, read)
Add read perm for all authenticated subj .
44,951
def add_verified_read(self):
    """Grant read to all verified subjects, replacing the public grant."""
    read = 'read'
    self.remove_perm(d1_common.const.SUBJECT_PUBLIC, read)
    self.add_perm(d1_common.const.SUBJECT_VERIFIED, read)
Add read perm for all verified subj .
44,952
def add_perm(self, subj_str, perm_str):
    """Grant ``perm_str`` to ``subj_str`` after validating the permission."""
    self._assert_valid_permission(perm_str)
    subjects = self._perm_dict.setdefault(perm_str, set())
    subjects.add(subj_str)
Add a permission for a subject .
44,953
def remove_perm(self, subj_str, perm_str):
    """Revoke ``perm_str`` and every equal-or-higher permission from subject."""
    self._assert_valid_permission(perm_str)
    for level in self._equal_or_higher_perm(perm_str):
        self._perm_dict.setdefault(level, set()).discard(subj_str)
Remove permission from a subject .
44,954
def remove_subj(self, subj_str):
    """Strip every permission currently held by ``subj_str``."""
    for subject_set in list(self._perm_dict.values()):
        subject_set.discard(subj_str)
Remove all permissions for subject .
44,955
def _perm_dict_from_pyxb(self, access_pyxb):
    """Build a perm -> subjects dict from an AccessPolicy PyXB object."""
    subjects = self._subj_dict_from_pyxb(access_pyxb)
    return self._perm_dict_from_subj_dict(subjects)
Return dict representation of AccessPolicy PyXB obj .
44,956
def _perm_dict_from_subj_dict ( self , subj_dict ) : perm_dict = { } for subj_str , perm_set in list ( subj_dict . items ( ) ) : for perm_str in perm_set : perm_dict . setdefault ( perm_str , set ( ) ) . add ( subj_str ) return perm_dict
Return dict where keys and values of subj_dict have been flipped around .
44,957
def _pyxb_from_perm_dict(self, perm_dict):
    """Return an AccessPolicy PyXB object built from ``perm_dict``."""
    norm = self._norm_perm_list_from_perm_dict(perm_dict)
    return self._pyxb_from_norm_perm_list(norm)
Return an AccessPolicy PyXB representation of perm_dict
44,958
def _pyxb_from_norm_perm_list(self, norm_perm_list):
    """Return an AccessPolicy PyXB object built from a normalized perm list.

    ``norm_perm_list`` is a sequence of (perm_str, subject_list) pairs, one
    allow-rule per pair. Returns None (implicitly) when the resulting policy
    would contain no allow rules.
    """
    access_pyxb = d1_common.types.dataoneTypes.accessPolicy()
    for perm_str, subj_list in norm_perm_list:
        rule_pyxb = d1_common.types.dataoneTypes.accessRule()
        rule_pyxb.permission.append(perm_str)
        for subj_str in subj_list:
            rule_pyxb.subject.append(subj_str)
        access_pyxb.allow.append(rule_pyxb)
    # NOTE: falls through returning None when there are no allow rules.
    if len(access_pyxb.allow):
        return access_pyxb
Return an AccessPolicy PyXB representation of norm_perm_list
44,959
def _subj_dict_from_pyxb(self, access_pyxb):
    """Build a subj -> perm-set dict from an AccessPolicy PyXB object."""
    subj_dict = {}
    for allow_pyxb in access_pyxb.allow:
        perm_set = {perm_pyxb for perm_pyxb in allow_pyxb.permission}
        for subj_pyxb in allow_pyxb.subject:
            subj_dict.setdefault(subj_pyxb.value(), set()).update(perm_set)
    return subj_dict
Return a dict representation of access_pyxb which is an AccessPolicy PyXB object .
44,960
def _highest_perm_dict_from_perm_dict(self, perm_dict):
    """Return a perm_dict keeping only each subject's highest permission.

    Walks permissions from highest to lowest; subjects holding a higher
    permission are removed from every lower level.

    NOTE(review): ``copy.copy`` is shallow and ``-=`` mutates the shared set
    objects in place, so the caller's ``perm_dict`` sets may be modified as a
    side effect — confirm whether callers rely on this.
    """
    highest_perm_dict = copy.copy(perm_dict)
    for ordered_str in reversed(ORDERED_PERM_LIST):
        for lower_perm in self._lower_perm_list(ordered_str):
            highest_perm_dict.setdefault(lower_perm, set())
            highest_perm_dict[lower_perm] -= perm_dict.get(ordered_str, set())
    return highest_perm_dict
Return a perm_dict where only the highest permission for each subject is included .
44,961
def _norm_perm_list_from_perm_dict(self, perm_dict):
    """Return a minimal, ordered list of [perm, sorted subjects] pairs."""
    highest = self._highest_perm_dict_from_perm_dict(perm_dict)
    return [
        [perm, sorted(highest[perm])]
        for perm in ORDERED_PERM_LIST
        if highest.get(perm, False)
    ]
Return a minimal ordered hashable list of subjects and permissions .
44,962
def _effective_perm_list_from_iter(self, perm_iter):
    """Return effective perms (ordered low to high) for the highest perm.

    Returns None when ``perm_iter`` is empty.
    """
    top = self._highest_perm_from_iter(perm_iter)
    if top is None:
        return None
    return self._equal_or_lower_perm_list(top)
Return a list of effective permissions for the highest permission in perm_iter, ordered lower to higher, or None if perm_iter is empty.
44,963
def _present_perm_set_for_subj ( self , perm_dict , subj_str ) : return { p for p , s in list ( perm_dict . items ( ) ) if subj_str in s }
Return a set containing only the permissions that are present in the perm_dict for subj_str
44,964
def _highest_perm_from_iter(self, perm_iter):
    """Return the highest permission present in ``perm_iter``, else None."""
    present = set(perm_iter)
    for candidate in reversed(ORDERED_PERM_LIST):
        if candidate in present:
            return candidate
Return the highest perm present in perm_iter or None if perm_iter is empty .
44,965
def _ordered_idx_from_perm(self, perm_str):
    """Return the index of ``perm_str`` in ORDERED_PERM_LIST, or None."""
    for idx, candidate in enumerate(ORDERED_PERM_LIST):
        if candidate == perm_str:
            return idx
Return the ordered index of perm_str or None if perm_str is not a valid permission .
44,966
def _assert_valid_permission(self, perm_str):
    """Raise InvalidRequest if ``perm_str`` is not a recognized permission."""
    if perm_str in ORDERED_PERM_LIST:
        return
    raise d1_common.types.exceptions.InvalidRequest(
        0,
        'Permission must be one of {}. perm_str="{}"'.format(
            ', '.join(ORDERED_PERM_LIST), perm_str
        ),
    )
Raise D1 exception if perm_str is not a valid permission .
44,967
def handle_unexpected_exception(max_traceback_levels=100):
    """Print a friendly message for common network errors, else a traceback.

    SSL and socket-timeout errors get the short error printer; anything else
    falls through to the full unexpected-exception report.
    """
    exc_type = sys.exc_info()[0]
    if exc_type.__name__ in ("SSLError", "timeout"):
        d1_cli.impl.util.print_error()
    else:
        _print_unexpected_exception(max_traceback_levels)
Suppress stack traces for common errors and provide hints for how to resolve them .
44,968
def _save_sciobj_bytes_from_request(request, pid):
    """Persist the uploaded science object bytes to the object store.

    Django keeps small uploads in memory and streams large ones to a
    temporary file on disk; the two cases are handled differently here.
    """
    sciobj_path = d1_gmn.app.sciobj_store.get_abs_sciobj_file_path_by_pid(pid)
    if hasattr(request.FILES['object'], 'temporary_file_path'):
        # Large upload: already on disk, move the temp file into place.
        d1_common.utils.filesystem.create_missing_directories_for_file(
            sciobj_path)
        django.core.files.move.file_move_safe(
            request.FILES['object'].temporary_file_path(), sciobj_path)
    else:
        # Small upload: stream the in-memory chunks out to the store.
        with d1_gmn.app.sciobj_store.open_sciobj_file_by_path_ctx(
            sciobj_path, write=True
        ) as sciobj_stream:
            for chunk in request.FILES['object'].chunks():
                sciobj_stream.write(chunk)
Django stores small uploads in memory and streams large uploads directly to disk .
44,969
def move_to_collection(self, source_collection, destination_collection):
    """Move every entity in this set from source to destination collection."""
    for member in self:
        member.move_to_collection(source_collection, destination_collection)
Move entities from source to destination collection .
44,970
def move_to_collection(self, source_collection, destination_collection):
    """Move this entity, and all its data, from source to destination.

    The entity's data objects are detached from the source collection and
    attached to the destination along with the entity itself.
    """
    self.collections.remove(source_collection)
    # self.data.all() is evaluated lazily at each call site, so the removal
    # above does not affect which data objects are moved here.
    source_collection.data.remove(*self.data.all())
    self.collections.add(destination_collection)
    destination_collection.data.add(*self.data.all())
Move entity from source to destination collection .
44,971
def quote(s, unsafe='/'):
    """Percent-encode '%' and every character listed in ``unsafe``."""
    encoded = s.replace('%', '%25')
    for ch in unsafe:
        code = hex(ord(ch)).upper()[2:]
        encoded = encoded.replace(ch, '%' + code)
    return encoded
Pass in a dictionary that has unsafe characters as the keys and the percent encoded value as the value .
44,972
def get_dependencies(self):
    """Return model dependencies that should trigger updates of this model."""
    extra = [Data.collection_set, Data.entity_set, Data.parents]
    return super().get_dependencies() + extra
Return dependencies which should trigger updates of this model .
44,973
def get_confirmation(self):
    """Prompt the user to confirm collecting static files.

    Raises CommandError when the user does not type 'yes'.
    """
    if self.clear:
        action = 'This will DELETE ALL FILES in this location!'
    else:
        action = 'This will overwrite existing files!'
    message = (
        "\n"
        "You have requested to collect static files at the destination\n"
        "location as specified in your settings\n"
        "\n"
        " {destination}\n"
        "\n"
        "{action}\n"
        "Are you sure you want to do this?\n"
        "\n"
        "Type 'yes' to continue, or 'no' to cancel: ".format(
            destination=self.destination_path,
            action=action,
        )
    )
    # The original wrapped the prompt in ''.join(message), which is a no-op
    # on a str; pass the string directly.
    if input(message) != 'yes':
        raise CommandError("Collecting tools cancelled.")
Get user confirmation to proceed .
44,974
def clear_dir(self):
    """Delete all files, links, and subdirectories in ``self.destination_path``.

    The directory itself is kept. Bug fix: ``os.listdir`` returns bare names,
    so each entry must be joined with the destination directory — the
    original checked and removed paths relative to the current working
    directory instead.
    """
    self.stdout.write("Deleting contents of '{}'.".format(self.destination_path))
    for name in os.listdir(self.destination_path):
        full_path = os.path.join(self.destination_path, name)
        if os.path.isfile(full_path) or os.path.islink(full_path):
            os.remove(full_path)
        elif os.path.isdir(full_path):
            shutil.rmtree(full_path)
Delete contents of the directory on the given path .
44,975
def change_path_prefix(self, path, old_prefix, new_prefix, app_name):
    """Re-root ``path`` from ``old_prefix`` to ``new_prefix``/``app_name``."""
    tail = os.path.relpath(path, old_prefix)
    return os.path.join(new_prefix, app_name, tail)
Change path prefix and include app name .
44,976
def collect(self):
    """Copy every app's tools directory into the single destination location.

    Each app gets its own subdirectory (dots in the app name replaced with
    underscores); the source directory tree is mirrored beneath it.
    """
    for app_name, tools_path in get_apps_tools().items():
        self.stdout.write("Copying files from '{}'.".format(tools_path))
        # Dotted app names would create nested directories.
        app_name = app_name.replace('.', '_')
        app_destination_path = os.path.join(self.destination_path, app_name)
        if not os.path.isdir(app_destination_path):
            os.mkdir(app_destination_path)
        for root, dirs, files in os.walk(tools_path):
            # Recreate the directory structure before copying files; os.walk
            # is top-down so parents always exist first.
            for dir_name in dirs:
                dir_source_path = os.path.join(root, dir_name)
                dir_destination_path = self.change_path_prefix(
                    dir_source_path, tools_path, self.destination_path,
                    app_name)
                if not os.path.isdir(dir_destination_path):
                    os.mkdir(dir_destination_path)
            for file_name in files:
                file_source_path = os.path.join(root, file_name)
                file_destination_path = self.change_path_prefix(
                    file_source_path, tools_path, self.destination_path,
                    app_name)
                # copy2 preserves file metadata (mtime, permissions).
                shutil.copy2(file_source_path, file_destination_path)
Get tools locations and copy them to a single location .
44,977
def handle(self, **options):
    """Command entry point: prepare, confirm, optionally clear, collect."""
    self.set_options(**options)
    os.makedirs(self.destination_path, exist_ok=True)
    destination_not_empty = any(os.listdir(self.destination_path))
    if self.interactive and destination_not_empty:
        self.get_confirmation()
    if self.clear:
        self.clear_dir()
    self.collect()
Collect tools .
44,978
def get_data_checksum(proc_input, proc_slug, proc_version):
    """Return SHA-256 hex digest over canonical inputs, slug, and version."""
    digest = hashlib.sha256()
    parts = (
        json.dumps(proc_input, sort_keys=True),
        proc_slug,
        str(proc_version),
    )
    for part in parts:
        digest.update(part.encode('utf-8'))
    return digest.hexdigest()
Compute checksum of processor inputs name and version .
44,979
def dict_dot(d, k, val=None, default=None):
    """Get or set the value at dot-notation key *k* in the nested structure *d*.

    *d* may be a multilevel dict or a Django model instance; model levels are
    accessed with getattr/setattr, dict levels with item access.

    Behavior depends on the arguments:

    * ``val is None`` and ``k == ''``: return *d* itself.
    * ``val is None`` and *default* is callable: walk *k*, creating each
      missing level with ``default()``, and return the final value.
    * ``val is None`` otherwise: plain lookup of the value at *k*.
    * *val* given: set the value at *k* (missing intermediate levels are
      created as dicts) and return *val*.

    NOTE(review): the branches test ``val is None``, so this function cannot
    be used to *set* the value None — confirm callers never need that.
    """
    if val is None and k == '':
        # Empty key addresses the root object itself.
        return d

    def set_default(dict_or_model, key, default_value):
        # setdefault-equivalent that also works on model instances.
        if isinstance(dict_or_model, models.Model):
            if not hasattr(dict_or_model, key):
                setattr(dict_or_model, key, default_value)
            return getattr(dict_or_model, key)
        else:
            return dict_or_model.setdefault(key, default_value)

    def get_item(dict_or_model, key):
        # Read one level: attribute for models, item for dicts.
        if isinstance(dict_or_model, models.Model):
            return getattr(dict_or_model, key)
        else:
            return dict_or_model[key]

    def set_item(dict_or_model, key, value):
        # Write one level: attribute for models, item for dicts.
        if isinstance(dict_or_model, models.Model):
            setattr(dict_or_model, key, value)
        else:
            dict_or_model[key] = value

    if val is None and callable(default):
        # Traverse the path, materializing missing levels with default().
        return functools.reduce(lambda a, b: set_default(a, b, default()), k.split('.'), d)
    elif val is None:
        # Pure read: fold get_item over the path components.
        return functools.reduce(get_item, k.split('.'), d)
    else:
        try:
            # Split off the last component, ensure the parent level exists
            # (created as a dict when missing), then assign into it.
            k, k_last = k.rsplit('.', 1)
            set_item(dict_dot(d, k, default=dict), k_last, val)
        except ValueError:
            # No dot in k: assign directly at the top level.
            set_item(d, k, val)
        return val
Get or set value using a dot - notation key in a multilevel dict .
44,980
def get_apps_tools():
    """Return a mapping of application name to its ``tools`` directory path.

    Every installed Django app with a ``tools`` subdirectory is included,
    plus any directories listed in the ``RESOLWE_CUSTOM_TOOLS_PATHS``
    setting (keyed ``_custom_<n>``).
    """
    tools_paths = {}

    for app_config in apps.get_app_configs():
        candidate = os.path.join(app_config.path, 'tools')
        if os.path.isdir(candidate):
            tools_paths[app_config.name] = candidate

    # Additional tool directories may be configured explicitly.
    custom_tools_paths = getattr(settings, 'RESOLWE_CUSTOM_TOOLS_PATHS', [])
    if not isinstance(custom_tools_paths, list):
        raise KeyError("`RESOLWE_CUSTOM_TOOLS_PATHS` setting must be a list.")

    for seq, custom_path in enumerate(custom_tools_paths):
        tools_paths['_custom_{}'.format(seq)] = custom_path

    return tools_paths
Get applications' tools and their paths.
44,981
def rewire_inputs(data_list):
    """Rewire inputs of the provided data objects.

    Input fields that reference the id of an original object are replaced
    with the id of its copy. *data_list* is a list of dicts with
    ``original`` and ``copy`` data objects; it is returned unchanged.
    """
    if len(data_list) < 2:
        return data_list

    id_map = {entry['original'].id: entry['copy'].id for entry in data_list}

    for entry in data_list:
        copied = entry['copy']
        modified = False

        for field_schema, fields in iterate_fields(copied.input, copied.process.input_schema):
            field_name = field_schema['name']
            field_value = fields[field_name]
            field_type = field_schema['type']

            if field_type.startswith('data:') and field_value in id_map:
                fields[field_name] = id_map[field_value]
                modified = True
            elif field_type.startswith('list:data:') and any(item in id_map for item in field_value):
                fields[field_name] = [id_map.get(item, item) for item in field_value]
                modified = True

        # Persist only the objects whose inputs actually changed.
        if modified:
            copied.save()

    return data_list
Rewire inputs of provided data objects .
44,982
def CreateFromDocument(xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a Python
    binding instance.

    Args:
        xml_text: XML document as text or bytes.
        default_namespace: Namespace applied to unqualified names; falls
            back to this module's namespace when None.
        location_base: Base recorded for diagnostic source locations.

    Returns:
        The root binding instance produced by the parser.
    """
    # If the runtime is not configured for the SAX parser, fall back to the
    # DOM-based code path.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    # The SAX parser consumes bytes; encode text input first.
    if isinstance(xmld, pyxb.utils.six.text_type):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
Parse the given XML and use the document element to create a Python instance .
44,983
def CreateFromDOM(node, default_namespace=None):
    """Create a Python binding instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.
    """
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
Create a Python instance from the given DOM node . The node tag must correspond to an element declaration in this module .
44,984
def postloop(self):
    """Hook run once when the command loop terminates; print a farewell."""
    cmd.Cmd.postloop(self)
    d1_cli.impl.util.print_info("Exiting...")
Take care of any unfinished business .
44,985
def precmd(self, line):
    """Prefix the entered line and record it in the command history.

    Called after the line has been input but before it is interpreted;
    the (prefixed) line returned here is what gets executed.
    """
    prefixed = self.prefix + line
    self._history.append(prefixed.strip())
    return prefixed
This method is called after the line has been input but before it has been interpreted .
44,986
def default(self, line):
    """Report an input line whose command prefix is not recognized."""
    args = self._split_args(line, 0, 99)
    message = "Unknown command: {}".format(args[0])
    d1_cli.impl.util.print_error(message)
Called on an input line when the command prefix is not recognized .
44,987
def do_history(self, line):
    """history: Display a numbered list of commands entered so far."""
    self._split_args(line, 0, 0)
    for position, entry in enumerate(self._history):
        d1_cli.impl.util.print_info("{0: 3d} {1}".format(position, entry))
history Display a list of commands that have been entered .
44,988
def do_eof(self, line):
    """Exit when the system EOF character (e.g. Ctrl-D) is received."""
    d1_cli.impl.util.print_info("")
    self.do_exit(line)
Exit on system EOF character .
44,989
def do_reset(self, line):
    """reset: Set all session variables to their default values."""
    self._split_args(line, 0, 0)
    session = self._command_processor.get_session()
    session.reset()
    self._print_info_if_verbose("Successfully reset session variables")
reset Set all session variables to their default values .
44,990
def do_clearaccess(self, line):
    """clearaccess: Remove all subjects from the access policy.

    Afterwards only the submitter has access to the object.
    """
    self._split_args(line, 0, 0)
    session = self._command_processor.get_session()
    session.get_access_control().clear()
    self._print_info_if_verbose("Removed all subjects from access policy")
clearaccess Remove all subjects from access policy Only the submitter will have access to the object .
44,991
def do_allowrep(self, line):
    """allowrep: Allow new objects to be replicated."""
    self._split_args(line, 0, 0)
    policy = self._command_processor.get_session().get_replication_policy()
    policy.set_replication_allowed(True)
    self._print_info_if_verbose("Set replication policy to allow replication")
allowrep Allow new objects to be replicated .
44,992
def do_denyrep(self, line):
    """denyrep: Prevent new objects from being replicated."""
    self._split_args(line, 0, 0)
    policy = self._command_processor.get_session().get_replication_policy()
    policy.set_replication_allowed(False)
    self._print_info_if_verbose("Set replication policy to deny replication")
denyrep Prevent new objects from being replicated .
44,993
def do_clearrep(self, line):
    """clearrep: Reset the replication policy to its defaults."""
    self._split_args(line, 0, 0)
    policy = self._command_processor.get_session().get_replication_policy()
    policy.clear()
    self._print_info_if_verbose("Cleared the replication policy")
clearrep Set the replication policy to default .
44,994
def do_queue(self, line):
    """queue: Print the queue of pending write operations."""
    self._split_args(line, 0, 0)
    queue = self._command_processor.get_operation_queue()
    queue.display()
queue Print the queue of write operations .
44,995
def do_run(self, line):
    """run: Perform each operation in the queue of write operations."""
    self._split_args(line, 0, 0)
    queue = self._command_processor.get_operation_queue()
    queue.execute()
    self._print_info_if_verbose("All operations in the write queue were successfully executed")
run Perform each operation in the queue of write operations .
44,996
def do_edit(self, line):
    """edit: Edit the queue of write operations."""
    self._split_args(line, 0, 0)
    queue = self._command_processor.get_operation_queue()
    queue.edit()
    self._print_info_if_verbose("The write operation queue was successfully edited")
edit Edit the queue of write operations .
44,997
def do_clearqueue(self, line):
    """clearqueue: Discard queued write operations without performing them."""
    self._split_args(line, 0, 0)
    queue = self._command_processor.get_operation_queue()
    queue.clear()
    self._print_info_if_verbose("All operations in the write queue were cleared")
clearqueue Remove the operations in the queue of write operations without performing them .
44,998
def _group_groups ( perm_list ) : perm_list = sorted ( perm_list , key = lambda tup : tup [ 0 ] ) grouped_perms = [ ] for key , group in groupby ( perm_list , lambda tup : ( tup [ 0 ] , tup [ 1 ] ) ) : grouped_perms . append ( ( key [ 0 ] , key [ 1 ] , [ g [ 2 ] for g in group ] ) ) return grouped_perms
Group permissions by group .
44,999
def get_user_group_perms(user_or_group, obj):
    """Return object permissions for the given user (or group) on *obj*.

    Args:
        user_or_group: a user or a group instance (resolved via
            ``get_identity``).
        obj: the object whose permissions are inspected.

    Returns:
        Tuple ``(user_perms, group_perms)`` where ``user_perms`` is a list
        of permission codenames held directly by the user (every codename
        of the object's content type for superusers; empty for inactive
        users or when a group was passed) and ``group_perms`` is a list of
        ``(group_pk, group_name, [codenames])`` tuples.
    """
    user, group = get_identity(user_or_group)
    # Inactive users hold no permissions at all.
    if user and not user.is_active:
        return [], []
    user_model = get_user_model()
    ctype = ContentType.objects.get_for_model(obj)
    group_model = get_group_obj_perms_model(obj)
    group_rel_name = group_model.permission.field.related_query_name()
    if user:
        # Limit to groups the user belongs to.
        user_rel_name = user_model.groups.field.related_query_name()
        group_filters = {user_rel_name: user}
    else:
        # A concrete group was passed in: look only at that group.
        group_filters = {'pk': group.pk}
    if group_model.objects.is_generic():
        # Generic object-permission model: match by content type + object pk.
        group_filters.update({
            '{}__content_type'.format(group_rel_name): ctype,
            '{}__object_pk'.format(group_rel_name): obj.pk,
        })
    else:
        # Direct foreign key to the object.
        group_filters['{}__content_object'.format(group_rel_name)] = obj
    user_perms, group_perms = [], []
    if user:
        perms_qs = Permission.objects.filter(content_type=ctype)
        if user.is_superuser:
            # Superusers implicitly hold every permission of this type.
            user_perms = list(chain(perms_qs.values_list("codename", flat=True)))
        else:
            model = get_user_obj_perms_model(obj)
            related_name = model.permission.field.related_query_name()
            user_filters = {'{}__user'.format(related_name): user}
            if model.objects.is_generic():
                user_filters.update({
                    '{}__content_type'.format(related_name): ctype,
                    '{}__object_pk'.format(related_name): obj.pk,
                })
            else:
                user_filters['{}__content_object'.format(related_name)] = obj
            user_perms_qs = perms_qs.filter(**user_filters)
            user_perms = list(chain(user_perms_qs.values_list("codename", flat=True)))
    group_perms_qs = Group.objects.filter(**group_filters)
    # One row per (group pk, name, codename); _group_groups collapses the
    # codenames per group.
    group_perms = list(chain(group_perms_qs.order_by("pk").values_list("pk", "name", "{}__permission__codename".format(group_rel_name))))
    group_perms = _group_groups(group_perms)
    return user_perms, group_perms
Get permissions for the user and the user's groups.