idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
4,300
def get_basic_sort_link(self, request, field):
    """Build a sort link for *field*, keeping the preserved GET parameters."""
    query_string = self.get_querystring()
    sort_string = self.get_next_sort_string(field)
    # Join whichever pieces are non-empty: path[?sort][&query] / path[?query].
    pieces = [piece for piece in (sort_string, query_string) if piece]
    if pieces:
        return request.path + '?' + '&'.join(pieces)
    return request.path
Thanks to del_query_parameters and get_querystring, the link is built while preserving the relevant GET parameters and removing the others.
107
26
4,301
def build_thumb_path(self, image):
    """Build the absolute path of the to-be-saved thumbnail."""
    base_name = split(image.name)[-1]
    name, ext = splitext(base_name)
    if not self.in_memory(image.file):
        # On-disk files carry the full path in `name`; keep only the last part.
        name = name.split('/')[-1]
    upload_to = image.field.upload_to
    if not upload_to.endswith('/'):
        upload_to = f'{upload_to}/'
    return f'{self.storage.location}/{upload_to}{name}{THUMB_EXT}{ext}'
Build the absolute path of the to - be - saved thumbnail .
199
13
4,302
def run(self, **options):
    """Override runserver's entry point to bring Gunicorn on.

    Runs system and migration checks, prints a startup banner, then hands
    the configured address/port over to a GunicornRunner.  Ctrl-C shuts
    the runner down gracefully; any other error still triggers a shutdown
    before the exception propagates.
    """
    shutdown_message = options.get('shutdown_message', '')
    self.stdout.write("Performing system checks...\n\n")
    self.check(display_num_errors=True)
    self.check_migrations()
    now = datetime.datetime.now().strftime(r'%B %d, %Y - %X')
    if six.PY2:
        # On Python 2, strftime returns bytes; decode using the system encoding.
        now = now.decode(get_system_encoding())
    self.stdout.write(now)
    addr, port = self.addr, self.port
    addr = '[{}]'.format(addr) if self._raw_ipv6 else addr
    runner = GunicornRunner(addr, port, options)
    try:
        runner.run()
    except KeyboardInterrupt:
        runner.shutdown()
        if shutdown_message:
            self.stdout.write(shutdown_message)
        sys.exit(0)
    except BaseException:
        # Was a bare `except:`; made the catch-all explicit. Behavior unchanged:
        # shut the runner down and re-raise.
        runner.shutdown()
        raise
Override runserver s entry point to bring Gunicorn on .
220
13
4,303
def _plot(x, mph, mpd, threshold, edge, valley, ax, ind):
    """Plot results of the detect_peaks function (see its help).

    :param x: 1-D data array that was searched for peaks
    :param ind: indices of the detected peaks (numpy array)
    :param valley: True if valleys were detected instead of peaks
    :param ax: existing matplotlib axes, or None to create a new figure
    (mph/mpd/threshold/edge are only echoed in the title)
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        # Plotting is best-effort: silently degrade to a message.
        print('matplotlib is not available.')
    else:
        if ax is None:
            _, ax = plt.subplots(1, 1, figsize=(8, 4))
        ax.plot(x, 'b', lw=1)
        if ind.size:
            label = 'valley' if valley else 'peak'
            label = label + 's' if ind.size > 1 else label
            ax.plot(ind, x[ind], '+', mfc=None, mec='r', mew=2, ms=8,
                    label='%d %s' % (ind.size, label))
            ax.legend(loc='best', framealpha=.5, numpoints=1)
        ax.set_xlim(-.02 * x.size, x.size * 1.02 - 1)
        # Use only finite values so NaN/inf do not break the y-limits.
        ymin, ymax = x[np.isfinite(x)].min(), x[np.isfinite(x)].max()
        # Guard against a flat signal (zero range).
        yrange = ymax - ymin if ymax > ymin else 1
        ax.set_ylim(ymin - 0.1 * yrange, ymax + 0.1 * yrange)
        ax.set_xlabel('Data #', fontsize=14)
        ax.set_ylabel('Amplitude', fontsize=14)
        mode = 'Valley detection' if valley else 'Peak detection'
        ax.set_title("%s (mph=%s, mpd=%d, threshold=%s, edge='%s')"
                     % (mode, str(mph), mpd, str(threshold), edge))
Plot results of the detect_peaks function see its help .
409
13
4,304
def assignees(self):
    """List of assignees to the activity (None when no assignee data)."""
    data = self._json_data
    if 'assignees' in data:
        ids = data.get('assignees_ids')
        if ids == []:
            # Explicitly no assignees.
            return []
        if ids:
            id_csv = ','.join(str(pk) for pk in ids)
            return self._client.users(id__in=id_csv, is_hidden=False)
    return None
List of assignees to the activity .
145
8
4,305
def is_rootlevel(self):
    """Determine if the Activity is at the root level of a project."""
    # The root itself does not count as root-level.
    if self.is_root():
        return False
    parent_dict = self._json_data.get('parent_id_name')
    if parent_dict and 'name' in parent_dict:
        parent_name = parent_dict.get('name')
    elif not parent_dict:
        # Parent name not embedded; fetch the parent activity from the API.
        parent_name = self._client.activity(id=self._json_data.get('parent_id')).name
    else:
        parent_name = None
    return parent_name in ActivityRootNames.values()
Determine if the Activity is at the root level of a project .
140
15
4,306
def parent(self):
    """Retrieve the parent activity in which this activity is defined."""
    pid = self._json_data.get('parent_id')
    if pid is not None:
        return self._client.activity(pk=pid, scope=self.scope_id)
    raise NotFoundError("Cannot find subprocess for this task '{}', "
                        "as this task exist on top level.".format(self.name))
Retrieve the parent in which this activity is defined .
91
11
4,307
def siblings(self, **kwargs):
    """Retrieve the other activities that also belong to the parent."""
    pid = self._json_data.get('parent_id')
    if pid is not None:
        return self._client.activities(parent_id=pid, scope=self.scope_id, **kwargs)
    raise NotFoundError("Cannot find subprocess for this task '{}', "
                        "as this task exist on top level.".format(self.name))
Retrieve the other activities that also belong to the parent .
104
12
4,308
def download_as_pdf(self, target_dir=None, pdf_filename=None, paper_size=PaperSize.A4,
                    paper_orientation=PaperOrientation.PORTRAIT, include_appendices=False):
    """Retrieve the PDF of the Activity and save it to disk.

    :param target_dir: directory to save into (defaults to the current working directory)
    :param pdf_filename: file name; '.pdf' is appended when missing (defaults to activity name)
    :param paper_size: one of `PaperSize`
    :param paper_orientation: one of `PaperOrientation`
    :param include_appendices: when True the export becomes asynchronous and is polled
    :raises APIError: when the export request fails or the async download times out
    """
    if not pdf_filename:
        pdf_filename = self.name + '.pdf'
    if not pdf_filename.endswith('.pdf'):
        pdf_filename += '.pdf'
    full_path = os.path.join(target_dir or os.getcwd(), pdf_filename)
    request_params = {'papersize': paper_size, 'orientation': paper_orientation,
                      'appendices': include_appendices}
    url = self._client._build_url('activity_export', activity_id=self.id)
    response = self._client._request('GET', url, params=request_params)
    if response.status_code != requests.codes.ok:  # pragma: no cover
        raise APIError("Could not download PDF of activity {}".format(self.name))
    # If appendices are included, the request becomes asynchronous
    if include_appendices:
        data = response.json()
        # Poll the async download URL until it is ready or the time-out is hit.
        url = urljoin(self._client.api_root, data['download_url'])
        count = 0
        while count <= ASYNC_TIMEOUT_LIMIT:
            response = self._client._request('GET', url=url)
            if response.status_code == requests.codes.ok:  # pragma: no cover
                with open(full_path, 'wb') as f:
                    for chunk in response.iter_content(1024):
                        f.write(chunk)
                return
            count += ASYNC_REFRESH_INTERVAL
            time.sleep(ASYNC_REFRESH_INTERVAL)
        raise APIError("Could not download PDF of activity {} within the time-out limit of {} "
                       "seconds".format(self.name, ASYNC_TIMEOUT_LIMIT))
    # Synchronous case: the first response already contains the PDF body.
    with open(full_path, 'wb') as f:
        for chunk in response.iter_content(1024):
            f.write(chunk)
Retrieve the PDF of the Activity .
482
8
4,309
def parse(argv):
    """Parse cli args and dispatch to the matching subcommand."""
    args = docopt(__doc__, argv=argv)
    # NOTE(review): dispatches on sys.argv[2] rather than the `argv` parameter —
    # confirm this is intentional (it ties the function to the process args).
    try:
        call(sys.argv[2], args)
    except KytosException as exception:
        print("Error parsing args: {}".format(exception))
        exit()
Parse cli args .
62
6
4,310
def run(self):
    """Clean build, dist, pyc and egg artifacts from package and docs."""
    super().run()
    commands = (
        'rm -vrf ./build ./dist ./*.egg-info',
        'find . -name __pycache__ -type d | xargs rm -rf',
        'test -d docs && make -C docs/ clean',
    )
    for command in commands:
        call(command, shell=True)
Clean build dist pyc and egg from package and docs .
82
12
4,311
def run(self):
    """Run the yala linter and exit non-zero on failure."""
    print('Yala is running. It may take several seconds...')
    try:
        check_call('yala setup.py tests kytos', shell=True)
    except CalledProcessError:
        print('Linter check failed. Fix the error(s) above and try again.')
        sys.exit(-1)
    else:
        print('No linter error found.')
Run yala .
86
4
4,312
def allow(self):
    """Allow the add-on to be installed."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        button = self.find_primary_button()
        button.click()
Allow the add - on to be installed .
41
9
4,313
def addon_name(self):
    """Provide access to the add-on name."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        description = self.find_description()
        return description.find_element(By.CSS_SELECTOR, "b").text
Provide access to the add - on name .
59
10
4,314
def cancel(self):
    """Cancel the add-on install."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        button = self.find_secondary_button()
        button.click()
Cancel add - on install .
41
7
4,315
def install(self):
    """Confirm the add-on install."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        button = self.find_primary_button()
        button.click()
Confirm add - on install .
41
7
4,316
def _load_txt(file, devices, channels, header, **kwargs):
    """Read a .txt file generated by OpenSignals.

    :param file: path of the .txt acquisition file
    :param devices: list of device identifiers to read
    :param channels: per-device lists of channel numbers
    :param header: parsed acquisition header; header[device]["column labels"][chn]
        gives the column of the .txt file holding channel `chn`
    :param kwargs: extra keyword args forwarded to numpy.loadtxt (invalid ones dropped)
    :return: dict {device: {"CH<n>": ndarray}}
    """
    # Exclude keyword arguments that numpy.loadtxt does not accept.
    loadtxt_kwargs = _filter_keywords(numpy.loadtxt, kwargs)
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        device_data = {}
        for chn in channels[dev_nbr]:
            # Fix: the original also built an unused `columns` list; removed.
            device_data["CH" + str(chn)] = numpy.loadtxt(
                fname=file,
                usecols=header[device]["column labels"][chn],
                **loadtxt_kwargs)
        out_dict[device] = device_data
    return out_dict
Function used for reading . txt files generated by OpenSignals .
243
14
4,317
def _load_h5(file, devices, channels):
    """Read a .h5 file generated by OpenSignals.

    :return: dict {device: {"CH<n>": ndarray}}
    """
    h5_object = h5py.File(file)
    out_dict = {}
    for dev_nbr, device in enumerate(devices):
        device_data = {}
        for chn in channels[dev_nbr]:
            raw_rows = list(h5_object.get(device).get("raw").get("channel_" + str(chn)))
            # Each sample is stored as a one-element row; flatten to a 1-D array.
            device_data["CH" + str(chn)] = numpy.concatenate(raw_rows)
        out_dict[device] = device_data
    return out_dict
Function used for reading . h5 files generated by OpenSignals .
272
14
4,318
def _check_chn_type ( channels , available_channels ) : # ------------------------ Definition of constants and variables ------------------------------- chn_list_standardized = [ ] # %%%%%%%%%%%%%%%%%%%%%%%%%%% Fill of "chn_list_standardized" %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% devices = list ( available_channels . keys ( ) ) for dev_nbr , device in enumerate ( devices ) : if channels is not None : sub_unit = channels [ dev_nbr ] for channel in sub_unit : # Each sublist must be composed by integers. if channel in available_channels [ devices [ dev_nbr ] ] : continue else : raise RuntimeError ( "At least one of the specified channels is not available in " "the acquisition file." ) chn_list_standardized . append ( sub_unit ) else : # By omission all the channels were selected. chn_list_standardized . append ( available_channels [ device ] ) return chn_list_standardized
Function used for checking whether the elements in the channels input are coincident with the available channels.
220
18
4,319
def _available_channels ( devices , header ) : # ------------------------ Definition of constants and variables ------------------------------ chn_dict = { } # %%%%%%%%%%%%%%%%%%%%%% Access to the relevant data in the header %%%%%%%%%%%%%%%%%%%%%%%%%%%% for dev in devices : chn_dict [ dev ] = header [ dev ] [ "column labels" ] . keys ( ) return chn_dict
Function used for the determination of the available channels in each device .
86
13
4,320
def _check_dev_type ( devices , dev_list ) : if devices is not None : for device in devices : if device in dev_list : # List element is one of the available devices. continue else : raise RuntimeError ( "At least one of the specified devices is not available in the " "acquisition file." ) out = devices else : out = dev_list return out
Function used for checking whether the devices field only contains devices used during the acquisition.
82
16
4,321
def _file_type ( file ) : # %%%%%%%%%%%%%%%%%%%%%%%%%%%%% Verification of file type %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% if "." in file : # File with known extension. file_type = file . split ( "." ) [ - 1 ] else : # File without known extension. file_type = magic . from_file ( file , mime = True ) . split ( "/" ) [ - 1 ] return file_type
Function intended for identification of the file type .
99
9
4,322
def team(self):
    """Team to which the scope is assigned, or None when unassigned."""
    team_dict = self._json_data.get('team')
    if not (team_dict and team_dict.get('id')):
        return None
    return self._client.team(id=team_dict.get('id'))
Team to which the scope is assigned .
64
8
4,323
def parts(self, *args, **kwargs):
    """Retrieve parts belonging to this scope."""
    bucket_id = self.bucket.get('id')
    return self._client.parts(*args, bucket=bucket_id, **kwargs)
Retrieve parts belonging to this scope .
45
8
4,324
def part(self, *args, **kwargs):
    """Retrieve a single part belonging to this scope."""
    bucket_id = self.bucket.get('id')
    return self._client.part(*args, bucket=bucket_id, **kwargs)
Retrieve a single part belonging to this scope .
45
10
4,325
def create_model(self, parent, name, multiplicity=Multiplicity.ZERO_MANY):
    """Create a single part model in this scope."""
    client = self._client
    return client.create_model(parent, name, multiplicity=multiplicity)
Create a single part model in this scope .
45
9
4,326
def model(self, *args, **kwargs):
    """Retrieve a single model belonging to this scope."""
    bucket_id = self.bucket.get('id')
    return self._client.model(*args, bucket=bucket_id, **kwargs)
Retrieve a single model belonging to this scope .
45
10
4,327
def activities(self, *args, **kwargs):
    """Retrieve activities belonging to this scope."""
    # Older WIM (<2.0.0) expects `scope`; newer versions expect `scope_id`.
    if self._client.match_app_version(label='wim', version='<2.0.0', default=True):
        return self._client.activities(*args, scope=self.id, **kwargs)
    return self._client.activities(*args, scope_id=self.id, **kwargs)
Retrieve activities belonging to this scope .
100
8
4,328
def create_activity(self, *args, **kwargs):
    """Create a new activity belonging to this scope."""
    # Older WIM (<2.0.0) anchors on the process; newer on the workflow root.
    if self._client.match_app_version(label='wim', version='<2.0.0', default=True):
        return self._client.create_activity(self.process, *args, **kwargs)
    return self._client.create_activity(self.workflow_root, *args, **kwargs)
Create a new activity belonging to this scope .
102
9
4,329
def create_service(self, *args, **kwargs):
    """Create a service in the current scope."""
    return self._client.create_service(*args, scope=self.id, **kwargs)
Create a service to current scope .
42
7
4,330
def service(self, *args, **kwargs):
    """Retrieve a single service belonging to this scope."""
    return self._client.service(*args, scope=self.id, **kwargs)
Retrieve a single service belonging to this scope .
38
10
4,331
def service_execution(self, *args, **kwargs):
    """Retrieve a single service execution belonging to this scope."""
    return self._client.service_execution(*args, scope=self.id, **kwargs)
Retrieve a single service execution belonging to this scope .
44
11
4,332
def members(self, is_manager=None):
    """Retrieve the active members of the scope, optionally managers only."""
    all_members = self._json_data['members']
    if not is_manager:
        return [m for m in all_members if m['is_active']]
    return [m for m in all_members
            if m.get('is_active', False) and m.get('is_manager', False)]
Retrieve members of the scope .
91
7
4,333
def add_member(self, member):
    """Add a single member to the scope."""
    self._update_scope_project_team(select_action='add_member', user=member,
                                    user_type='member')
Add a single member to the scope .
50
8
4,334
def remove_member(self, member):
    """Remove a single member from the scope."""
    self._update_scope_project_team(select_action='remove_member', user=member,
                                    user_type='member')
Remove a single member to the scope .
50
8
4,335
def add_manager(self, manager):
    """Add a single manager to the scope."""
    self._update_scope_project_team(select_action='add_manager', user=manager,
                                    user_type='manager')
Add a single manager to the scope .
50
8
4,336
def remove_manager(self, manager):
    """Remove a single manager from the scope."""
    self._update_scope_project_team(select_action='remove_manager', user=manager,
                                    user_type='manager')
Remove a single manager to the scope .
50
8
4,337
def _update_scope_project_team(self, select_action, user, user_type):
    """Update the Project Team of the Scope (add or remove managers/members).

    :param select_action: action string, e.g. 'add_member' or 'remove_manager'
    :param user: username (string) of the user to add or remove
    :param user_type: 'member' or 'manager'; used only in the error message
    :raises APIError: when KE-chain rejects the update
    :raises NotFoundError: when the username does not exist
    :raises TypeError: when `user` is not a string
    """
    if isinstance(user, str):
        users = self._client._retrieve_users()
        # Resolve the username to the user record (we need its `pk`).
        manager_object = next((item for item in users['results']
                               if item["username"] == user), None)
        if manager_object:
            url = self._client._build_url('scope', scope_id=self.id)
            r = self._client._request('PUT', url,
                                      params={'select_action': select_action},
                                      data={'user_id': manager_object['pk']})
            if r.status_code != requests.codes.ok:  # pragma: no cover
                raise APIError("Could not {} {} in Scope".format(select_action.split('_')[0],
                                                                 user_type))
        else:
            raise NotFoundError("User {} does not exist".format(user))
    else:
        raise TypeError("User {} should be defined as a string".format(user))
Update the Project Team of the Scope . Updates include addition or removing of managers or members .
250
18
4,338
def clone(self, *args, **kwargs):
    """Clone the current scope."""
    client = self._client
    return client.clone_scope(*args, source_scope=self, **kwargs)
Clone current scope .
40
5
4,339
def name(self) -> str:
    """Friendly name for the stop place or platform."""
    base = self._data['name']
    if not self.is_platform:
        return base
    public_code = self._data["publicCode"]
    if public_code:
        return base + " Platform " + public_code
    # No public code: fall back to the last segment of the place id.
    return base + " Platform " + self.place_id.split(':')[-1]
Friendly name for the stop place or platform
96
9
4,340
def remove ( self , value , _sa_initiator = None ) : key = self . keyfunc ( value ) # Let self[key] raise if key is not in this collection # testlib.pragma exempt:__ne__ if not self . __contains__ ( key ) or value not in self [ key ] : raise sa_exc . InvalidRequestError ( "Can not remove '%s': collection holds '%s' for key '%s'. " "Possible cause: is the MappedCollection key function " "based on mutable properties or properties that only obtain " "values after flush?" % ( value , self [ key ] , key ) ) self . __getitem__ ( key , _sa_initiator ) . remove ( value )
Remove an item by value consulting the keyfunc for the key .
165
13
4,341
def progressive(image_field, alt_text=''):
    """Jinja2 filter: return a safe HTML chunk rendering the progressive image."""
    if not isinstance(image_field, ImageFieldFile):
        raise ValueError('"image_field" argument must be an ImageField.')
    for engine in engines.all():
        # Only Jinja2-backed engines expose a usable `env`.
        if not (isinstance(engine, BaseEngine) and hasattr(engine, 'env')):
            continue
        env = engine.env
        if not isinstance(env, Environment):
            continue
        context = render_progressive_field(image_field, alt_text)
        template = env.get_template('progressiveimagefield/render_field.html')
        return Markup(template.render(**context))
    return ''
Used as a Jinja2 filter this function returns a safe HTML chunk .
145
15
4,342
def get_form(self, form_class=None):
    """Return the form; if the task was only saved, treat all fields as not required."""
    form = super().get_form(form_class)
    if self._save:
        # Saving (without completing) must not enforce required fields.
        make_form_or_formset_fields_not_required(form)
    return form
If the task was only saved treat all form fields as not required .
53
14
4,343
def save_task(self):
    """Transition to save the task and return it to the ASSIGNED state."""
    task = self.request.activation.task
    task.status = STATUS.ASSIGNED
    task.save()
Transition to save the task and return to ASSIGNED state .
32
14
4,344
def activation_done(self, *args, **kwargs):
    """Complete the activation, or only save, depending on the form submit."""
    if not self._save:
        super().activation_done(*args, **kwargs)
    else:
        self.save_task()
Complete the activation or save only depending on form submit .
49
11
4,345
def niplot():
    """Extend the native matplotlib keyboard bindings on the current figure.

    Connects handlers so the arrow keys pan the view, +/- zoom, and the
    scroll wheel zooms under the cursor (handlers are module-level
    on_key_press / on_key_release / zoom callables).
    """
    canvas = gcf().canvas
    # The original stored the connection ids in an unused `cid` variable
    # (reassigned three times); the ids are not needed, so they are dropped.
    canvas.mpl_connect('key_press_event', on_key_press)
    canvas.mpl_connect('key_release_event', on_key_release)
    canvas.mpl_connect('scroll_event', zoom)
This script extends the native matplolib keyboard bindings . This script allows to use the up down left and right keys to move the visualization window . Zooming can be performed using the + and - keys . Finally the scroll wheel can be used to zoom under cursor .
95
54
4,346
def acquire_subsamples_gp1(input_data, file_name=None):
    """Build a grid of Bokeh figures showing an ECG signal resampled at several rates.

    :param input_data: sequence of raw samples acquired at 4000 Hz
    :param file_name: name of the HTML output file (currently unused)
    """
    # Generation of the HTML file where the plot will be stored.
    # file_name = _generate_bokeh_file(file_name)

    # Number of acquired samples (Original sample_rate = 4000 Hz)
    fs_orig = 4000
    nbr_samples_orig = len(input_data)
    data_interp = {"4000": {}}
    data_interp["4000"]["data"] = input_data
    data_interp["4000"]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig,
                                                 nbr_samples_orig)

    # Constants
    time_orig = data_interp["4000"]["time"]
    data_orig = data_interp["4000"]["data"]

    # ============ Interpolation of data accordingly to the desired sampling frequency ============
    # sample_rate in [3000, 1000, 500, 200, 100] - Some of the available sample frequencies at Plux
    # acquisition systems
    # sample_rate in [50, 20] - Non-functional sampling frequencies (Not available at Plux devices
    # because of their limited application)
    for sample_rate in [3000, 1000, 500, 200, 100, 50, 20]:
        fs_str = str(sample_rate)
        nbr_samples_interp = int((nbr_samples_orig * sample_rate) / fs_orig)
        data_interp[fs_str] = {}
        data_interp[fs_str]["time"] = numpy.linspace(0, nbr_samples_orig / fs_orig,
                                                     nbr_samples_interp)
        data_interp[fs_str]["data"] = numpy.interp(data_interp[fs_str]["time"],
                                                   time_orig, data_orig)

    # List that store the figure handler.
    list_figures = []

    # Generation of Bokeh Figures, grouped two per row.
    for iter_nbr, sample_rate in enumerate(["4000", "3000", "1000", "500", "200", "100"]):
        # Start a new row on the first figure and on every even iteration.
        if iter_nbr == 0 or iter_nbr % 2 == 0:
            list_figures.append([])

        # Plotting phase: only the first second of data ([:int(sample_rate)]) is drawn.
        list_figures[-1].append(figure(x_axis_label='Time (s)', y_axis_label='Raw Data',
                                       title="Sampling Frequency: " + sample_rate + " Hz",
                                       **opensignals_kwargs("figure")))
        list_figures[-1][-1].line(data_interp[sample_rate]["time"][:int(sample_rate)],
                                  data_interp[sample_rate]["data"][:int(sample_rate)],
                                  **opensignals_kwargs("line"))
    # NOTE(review): the source appears truncated here — no gridplot/show of
    # `list_figures` and no return; verify against the original module.
Function invoked for plotting a grid - plot with 3x2 format showing the differences in ECG signals accordingly to the chosen sampling frequency .
696
27
4,347
def download(link, out):
    """Download `link` and write the raw response body to the file `out`."""
    # [Source: https://stackoverflow.com/questions/7243750/download-file-from-web-in-python-3]
    response = requests.get(link)
    with open(out, 'wb') as outfile:
        outfile.write(response.content)
Downloading data from websites (such as previously acquired physiological signals) is an extremely relevant task, considering that without data, processing cannot take place.
72
27
4,348
def argrelmin(data, axis=0, order=1, mode='clip'):
    """Calculate the relative minima of `data` (thin wrapper over argrelextrema)."""
    return argrelextrema(data, np.less, axis=axis, order=order, mode=mode)
Calculate the relative minima of data .
42
10
4,349
def argrelmax(data, axis=0, order=1, mode='clip'):
    """Calculate the relative maxima of `data` (thin wrapper over argrelextrema)."""
    return argrelextrema(data, np.greater, axis=axis, order=order, mode=mode)
Calculate the relative maxima of data .
42
10
4,350
def peaks(signal, tol=None):
    """Detect the positions of all peaks of `signal` above the threshold `tol`.

    When `tol` is omitted, the signal minimum is used, i.e. no peak is excluded.
    """
    if tol is None:
        tol = min(signal)
    # Clipping below `tol` flattens sub-threshold regions so their local
    # maxima are no longer strict maxima.
    return argrelmax(clip(signal, tol, signal.max()))[0]
This function detects all the peaks of a signal and returns those time positions . To reduce the amount of peaks detected a threshold is introduced so only the peaks above that value are considered .
53
36
4,351
def get_project(url=None, username=None, password=None, token=None, scope=None, scope_id=None,
                env_filename=None, status=ScopeStatus.ACTIVE):
    """Retrieve and return the KE-chain project (scope) to be used throughout an app.

    Credentials come either from environment variables (mandatory when
    KECHAIN_FORCE_ENV_USE is set, or when no explicit arguments are given) or
    from the explicit url/username/password/token arguments.

    :raises ClientError: when the provided/available credentials are insufficient
    """
    if env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False):
        if not os.getenv(kecenv.KECHAIN_URL):
            raise ClientError(
                "Error: KECHAIN_URL should be provided as environment variable (use of env vars is enforced)")
        # BUG FIX: the original tested KECHAIN_PASSWORD twice and never
        # KECHAIN_USERNAME; per the error message the intent is a token OR a
        # username+password pair.
        if not (os.getenv(kecenv.KECHAIN_TOKEN) or
                (os.getenv(kecenv.KECHAIN_USERNAME) and os.getenv(kecenv.KECHAIN_PASSWORD))):
            raise ClientError(
                "Error: KECHAIN_TOKEN or KECHAIN_USERNAME and KECHAIN_PASSWORD should be provided as "
                "environment variable(s) (use of env vars is enforced)")
        if not (os.getenv(kecenv.KECHAIN_SCOPE) or os.getenv(kecenv.KECHAIN_SCOPE_ID)):
            raise ClientError(
                "Error: KECHAIN_SCOPE or KECHAIN_SCOPE_ID should be provided as environment variable "
                "(use of env vars is enforced)")
    if env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False) or \
            not any((url, username, password, token, scope, scope_id)):
        # Fully environment-driven configuration.
        client = Client.from_env(env_filename=env_filename)
        scope_id = env(kecenv.KECHAIN_SCOPE_ID, default=None)
        scope = env(kecenv.KECHAIN_SCOPE, default=None)
        status = env(kecenv.KECHAIN_SCOPE_STATUS, default=None)
    elif (url and ((username and password) or (token)) and (scope or scope_id)) and \
            not env.bool(kecenv.KECHAIN_FORCE_ENV_USE, default=False):
        client = Client(url=url)
        client.login(username=username, password=password, token=token)
    else:
        raise ClientError("Error: insufficient arguments to connect to KE-chain. "
                          "See documentation of `pykechain.get_project()`")
    if scope_id:
        return client.scope(pk=scope_id, status=status)
    else:
        return client.scope(name=scope, status=status)
Retrieve and return the KE - chain project to be used throughout an app .
596
16
4,352
def _rebuild_key_ids ( self ) : self . _key_ids = collections . defaultdict ( list ) for i , x in enumerate ( self . _pairs ) : self . _key_ids [ x [ 0 ] ] . append ( i )
Rebuild the internal key to index mapping .
58
9
4,353
def iteritems(self):
    """Yield (key, value) pairs, keeping only the first occurrence of each key."""
    seen = set()
    for key, value in self._pairs:
        if key in seen:
            continue
        seen.add(key)
        yield key, value
Iterator across all the non - duplicate keys and their values . Only yields the first key of duplicates .
51
21
4,354
def _update(self, resource, update_dict=None, params=None, **kwargs):
    """PUT `update_dict` to `resource` and refresh this object on success.

    :raises APIError: when KE-chain does not answer with HTTP 200
    """
    url = self._client._build_url(resource, **kwargs)
    response = self._client._request('PUT', url, json=update_dict, params=params)
    if response.status_code != requests.codes.ok:  # pragma: no cover
        raise APIError("Could not update {} ({})".format(self.__class__.__name__,
                                                         response.json().get('results')))
    self.refresh()
Update the object .
136
4
4,355
def members(self, role=None):
    """Members of the team, optionally filtered by role."""
    if role and role not in TeamRoles.values():
        raise IllegalArgumentError(
            "role should be one of `TeamRoles` {}, got '{}'".format(TeamRoles.values(), role))
    member_list = self._json_data.get('members')
    if not role:
        return member_list
    return [teammember for teammember in member_list if teammember.get('role') == role]
Members of the team .
112
5
4,356
def scopes(self, **kwargs):
    """Scopes associated to the team."""
    client = self._client
    return client.scopes(team=self.id, **kwargs)
Scopes associated to the team .
34
7
4,357
def insert_hash(path: Path, content: Union[str, bytes], *,
                hash_length=7, hash_algorithm=hashlib.md5):
    """Insert a content-based hash into the file name, after the first dot.

    Names without a dot get the hash appended as a suffix instead.
    """
    if isinstance(content, str):
        content = content.encode()
    digest = hash_algorithm(content).hexdigest()[:hash_length]
    if '.' not in path.name:
        return path.with_name(f'{path.name}.{digest}')
    return path.with_name(re.sub(r'\.', f'.{digest}.', path.name, count=1))
Insert a hash based on the content into the path after the first dot .
142
15
4,358
def options(cls):
    """Provide a sorted list of (value, name) options declared on the class."""
    return sorted((value, name)
                  for name, value in cls.__dict__.items()
                  if not name.startswith('__'))
Provide a sorted list of options .
45
8
4,359
def navbar(self):
    """Provide access to the Navigation Bar."""
    current_handle = self.selenium.current_window_handle
    window = BaseWindow(self.selenium, current_handle)
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        element = self.selenium.find_element(*self._nav_bar_locator)
        return NavBar(window, element)
Provide access to the Navigation Bar .
83
8
4,360
def notification(self):
    """Provide access to the currently displayed notification, or None."""
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        # First, look for a directly displayed notification popup.
        try:
            root = self.selenium.find_element(*self._notification_locator)
            return BaseNotification.create(self, root)
        except NoSuchElementException:
            pass
        # Otherwise, look for a visible notification inside the app menu.
        try:
            candidates = self.selenium.find_elements(*self._app_menu_notification_locator)
            root = next(el for el in candidates if el.is_displayed())
            return BaseNotification.create(self, root)
        except StopIteration:
            pass
        return None
Provide access to the currently displayed notification .
137
9
4,361
def wait_for_notification(self, notification_class=BaseNotification):
    """Wait until the given notification type is shown; None waits for dismissal."""
    if not notification_class:
        # Falsy class: wait until every notification is gone.
        self.wait.until(
            lambda _: self.notification is None,
            message="Unexpected notification shown.",
        )
        return None
    if notification_class is BaseNotification:
        message = "No notification was shown."
    else:
        message = "{0} was not shown.".format(notification_class.__name__)
    self.wait.until(
        lambda _: isinstance(self.notification, notification_class),
        message=message,
    )
    return self.notification
Wait for the specified notification to be displayed .
120
9
4,362
def open_window(self, private=False):
    """Open a new (optionally private) browser window via the File menu."""
    handles_before = self.selenium.window_handles
    self.switch_to()
    with self.selenium.context(self.selenium.CONTEXT_CHROME):
        self.selenium.find_element(*self._file_menu_button_locator).click()
        if private:
            self.selenium.find_element(*self._file_menu_private_window_locator).click()
        else:
            self.selenium.find_element(*self._file_menu_new_window_button_locator).click()
    return self.wait.until(
        expected.new_browser_window_is_opened(self.selenium, handles_before),
        message="No new browser window opened",
    )
Open a new browser window .
197
6
4,363
def to_serializable_dict(self, attrs_to_serialize=None, rels_to_expand=None,
                         rels_to_serialize=None, key_modifications=None):
    """An alias for todict."""
    return self.todict(attrs_to_serialize=attrs_to_serialize,
                       rels_to_expand=rels_to_expand,
                       rels_to_serialize=rels_to_serialize,
                       key_modifications=key_modifications)
An alias for todict
112
5
4,364
def serialize_attrs(self, *args):
    """Convert an instance to a dict with only the specified attributes as keys.

    Attributes missing on the class or forbidden for serialization are skipped;
    list-like values are materialized as plain lists.
    """
    cls = type(self)
    forbidden = cls.attrs_forbidden_for_serialization()
    result = {}
    for attr in args:
        if not hasattr(cls, attr) or attr in forbidden:
            continue
        value = getattr(self, attr)
        result[attr] = list(value) if is_list_like(value) else value
    return result
Converts an instance to a dictionary with only the specified attributes as keys.
165
14
4,365
def fundamental_frequency(s, FS):
    """Compute the fundamental frequency of signal `s` sampled at `FS` Hz.

    Estimated as the lowest "big peak" of the spectrum above 0.5 Hz;
    returns 0 when no such peak is found.
    """
    # TODO: review fundamental frequency to guarantee that f0 exists
    # suggestion peak level should be bigger
    # TODO: explain code
    s = s - mean(s)  # remove the DC component before the FFT
    f, fs = plotfft(s, FS, doplot=False)
    # fs = smooth(fs, 50.0)
    # Keep only the positive-frequency half of the spectrum, excluding DC.
    fs = fs[1:int(len(fs) / 2)]
    f = f[1:int(len(f) / 2)]
    cond = find(f > 0.5)[0]  # index of the first frequency above 0.5 Hz
    bp = bigPeaks(fs[cond:], 0)
    if bp == []:
        f0 = 0
    else:
        bp = bp + cond  # shift peak indices back to the full-spectrum frame
        f0 = f[min(bp)]
    return f0
Compute fundamental frequency along the specified axes .
167
9
4,366
def max_frequency(sig, FS):
    """Return the frequency below which 95% of the cumulative spectrum lies.

    (plotfft presumably returns the magnitude spectrum — confirm.)
    """
    f, fs = plotfft(sig, FS, doplot=False)
    cumulative = cumsum(fs)
    ind_mag = find(cumulative > cumulative[-1] * 0.95)[0]
    return f[ind_mag]
Compute max frequency along the specified axes .
71
9
4,367
def median_frequency(sig, FS):
    """Return the frequency below which 50% of the cumulative spectrum lies.

    (plotfft presumably returns the magnitude spectrum — confirm.)
    """
    f, fs = plotfft(sig, FS, doplot=False)
    cumulative = cumsum(fs)
    ind_mag = find(cumulative > cumulative[-1] * 0.50)[0]
    return f[ind_mag]
Compute median frequency along the specified axes .
73
9
4,368
def call(subcommand, args):
    """Dispatch *subcommand* on NAppsAPI after normalizing the napp list."""
    args['<napp>'] = parse_napps(args['<napp>'])
    getattr(NAppsAPI, subcommand)(args)
Call a subcommand passing the args .
50
8
4,369
def parse_napp(napp_id):
    """Convert a napp_id into a (username, napp_name, version) tuple.

    username and napp_name must start with a letter, contain only
    letters, digits and underscores, and be at least three characters
    long; they are separated by a slash.  The version (after ':') is
    optional and may take any form.

    :raises KytosException: if napp_id is not of the form
        ``username/napp_name[:version]``.
    """
    pattern = r'([a-zA-Z][a-zA-Z0-9_]{2,})/([a-zA-Z][a-zA-Z0-9_]{2,}):?(.+)?'
    match = re.fullmatch(pattern, napp_id)
    if match is None:
        msg = '"{}" NApp has not the form username/napp_name[:version].'
        raise KytosException(msg.format(napp_id))
    return match.groups()
Convert a napp_id into a tuple with username, napp name and version.
241
17
4,370
def _generate_notebook_header(notebook_object, notebook_type,
                              notebook_title="Notebook Title", tags="tags",
                              difficulty_stars=1,
                              notebook_description="Notebook Description"):
    """Append the generic header cells to *notebook_object* in place.

    :param notebook_object: nbformat notebook whose "cells" list is extended.
    :param notebook_type: key into NOTEBOOK_KEYS selecting the header image
        and colour.
    :param notebook_title: title shown in the header cell.
    :param tags: iterable of tag strings joined into the tags shield.
    :param difficulty_stars: how many of the five stars (1-5) are "checked".
    :param notebook_description: markdown text for the description cell.
    """
    # ============================= Creation of Header ====================================
    header_temp = HEADER_ALL_CATEGORIES.replace(
        "header_image_color_i",
        "header_image_color_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace(
        "header_image_i", "header_image_" + str(NOTEBOOK_KEYS[notebook_type]))
    header_temp = header_temp.replace("Notebook Title", notebook_title)
    notebook_object["cells"].append(
        nb.v4.new_markdown_cell(
            header_temp, **{"metadata": {"tags": ["intro_info_title"]}}))
    # =============== Inclusion of the div with "Difficulty" and "Tags" ===================
    tags_and_diff = HEADER_TAGS.replace(
        '<td class="shield_right" id="tags">tags</td>',
        '<td class="shield_right" id="tags">' + "&#9729;".join(tags) + '</td>')
    # Mark stars up to difficulty_stars as "checked"; the rest stay plain.
    for star in range(1, 6):
        if star <= difficulty_stars:
            tags_and_diff = tags_and_diff.replace(
                "fa fa-star " + str(star), "fa fa-star " "checked")
        else:
            tags_and_diff = tags_and_diff.replace(
                "fa fa-star " + str(star), "fa fa-star")
    notebook_object["cells"].append(
        nb.v4.new_markdown_cell(
            tags_and_diff, **{"metadata": {"tags": ["intro_info_tags"]}}))
    # ================= Insertion of the div reserved to the Notebook Description ==================
    notebook_object["cells"].append(
        nb.v4.new_markdown_cell(
            notebook_description, **{"metadata": {"tags": ["test"]}}))
    notebook_object["cells"].append(nb.v4.new_markdown_cell(SEPARATOR))
    # ======================= Insertion of a blank Markdown and Code cell ==========================
    notebook_object["cells"].append(nb.v4.new_markdown_cell(MD_EXAMPLES))
    notebook_object["cells"].append(nb.v4.new_code_cell(CODE_EXAMPLES))
Internal function that is used for generation of the generic notebooks header .
624
13
4,371
def _request(self, method, path, params=None):
    """Make an HTTP request against the API and return the parsed response.

    :param method: one of 'GET', 'POST', 'PUT', 'DELETE'.
    :param path: path appended to the instance's base URL.
    :param params: request body, used only for POST and PUT.
    :returns: decoded JSON payload on success; otherwise a dict whose
        'status' is 'error' (HTTP error) or 'offline' (timeout or
        connection failure).
    :raises ValueError: if *method* is not a supported HTTP verb.
    """
    url = self._base_url + path
    try:
        if method == 'GET':
            response = requests.get(url, timeout=TIMEOUT)
        elif method == "POST":
            response = requests.post(url, params, timeout=TIMEOUT)
        elif method == "PUT":
            response = requests.put(url, params, timeout=TIMEOUT)
        elif method == "DELETE":
            response = requests.delete(url, timeout=TIMEOUT)
        else:
            # Previously an unknown verb crashed with UnboundLocalError
            # when `response` was read below; fail fast and clearly.
            raise ValueError("Unsupported HTTP method: {}".format(method))
        if response:
            return response.json()
        return {'status': 'error'}
    except requests.exceptions.HTTPError:
        return {'status': 'error'}
    except requests.exceptions.Timeout:
        return {'status': 'offline'}
    except requests.exceptions.RequestException:
        return {'status': 'offline'}
Make the actual request and returns the parsed response .
193
10
4,372
def post_worker_init(worker):
    """Gunicorn hook: print a runserver-style banner after a worker starts."""
    quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
    bound_urls = ', '.join('http://{0}/'.format(b) for b in worker.cfg.bind)
    banner = (
        "Django version {djangover}, Gunicorn version {gunicornver}, "
        "using settings {settings!r}\n"
        "Starting development server at {urls}\n"
        "Quit the server with {quit_command}.\n"
    ).format(
        djangover=django.get_version(),
        gunicornver=gunicorn.__version__,
        settings=os.environ.get('DJANGO_SETTINGS_MODULE'),
        urls=bound_urls,
        quit_command=quit_command,
    )
    sys.stdout.write(banner)
Hook into Gunicorn to display message after launching .
196
12
4,373
def value(self):
    """Return a short display string for the attachment, or None if unset."""
    raw = self._json_data.get('value')
    if not raw:
        return None
    return "[Attachment: {}]".format(raw.split('/')[-1])
Retrieve the data value of this attachment .
66
9
4,374
def filename(self):
    """Return the attachment's bare filename (no path), or None if unset."""
    raw = self._json_data.get('value')
    if self.value and raw:
        return raw.split('/')[-1]
    return None
Filename of the attachment without the full attachment path .
58
10
4,375
def upload(self, data, **kwargs):
    """Upload *data* as the attachment's content.

    Accepts a matplotlib Figure (uploaded as a plot), a string (treated
    as a file path and uploaded as binary), or anything else (uploaded
    as JSON).
    """
    try:
        # matplotlib is optional; only probe for Figure when it imports.
        import matplotlib.figure
        if isinstance(data, matplotlib.figure.Figure):
            self._upload_plot(data, **kwargs)
            return
    except ImportError:
        pass
    if isinstance(data, str):
        # A string is interpreted as a path to a file on disk.
        with open(data, 'rb') as fp:
            self._upload(fp)
    else:
        self._upload_json(data, **kwargs)
Upload a file to the attachment property .
105
8
4,376
def save_as(self, filename):
    """Download the attachment and write it to *filename* in binary mode."""
    with open(filename, 'w+b') as destination:
        # writelines writes each downloaded chunk back-to-back.
        destination.writelines(self._download())
Download the attachment to a file .
40
7
4,377
def devpiserver_cmdline_run(xom):
    """Swap in the bundled Semantic UI theme when --theme=semantic-ui."""
    if xom.config.args.theme != 'semantic-ui':
        return
    xom.config.args.theme = resource_filename('devpi_semantic_ui', '')
    xom.log.info("Semantic UI Theme loaded")
Load theme when theme parameter is semantic - ui .
72
11
4,378
def is_on(self, channel):
    """Return the cached on/off state for *channel* (False when unknown)."""
    # dict.get collapses the membership test + lookup into one call.
    return self._is_on.get(channel, False)
Check if a switch is turned on
32
7
4,379
def turn_on(self, channel, callback=None):
    """Send a relay-on message for *channel*; *callback* runs when sent."""
    if callback is None:
        # Default to a do-nothing completion callback.
        callback = lambda: None
    msg = velbus.SwitchRelayOnMessage(self._address)
    msg.relay_channels = [channel]
    self._controller.send(msg, callback)
Turn on switch .
72
4
4,380
def turn_off(self, channel, callback=None):
    """Send a relay-off message for *channel*; *callback* runs when sent."""
    if callback is None:
        # Default to a do-nothing completion callback.
        callback = lambda: None
    msg = velbus.SwitchRelayOffMessage(self._address)
    msg.relay_channels = [channel]
    self._controller.send(msg, callback)
Turn off switch .
72
4
4,381
def read_dew_point(self, t=None, rh=None):
    """Compute the dew point; with t and rh provided, no hardware access."""
    if t is None:
        t, rh = self.read_t(), None
    if rh is None:
        rh = self.read_rh(t)
    # Constants differ above/below freezing.
    phase = 'water' if t >= 0 else 'ice'
    tn = self.c.tn[phase]
    m = self.c.m[phase]
    log_rh = math.log(rh / 100.0)
    gamma = m * t / (tn + t)
    # Magnus-style dew point formula (ch 4.4).
    return tn * (log_rh + gamma) / (m - log_rh - gamma)
With t and rh provided does not access the hardware .
156
11
4,382
def _put_options ( self , options_list ) : new_options = self . _options . copy ( ) # make a full copy of the dict not to only link it and update dict in place new_options . update ( { "value_choices" : options_list } ) validate ( new_options , options_json_schema ) url = self . _client . _build_url ( 'property' , property_id = self . id ) response = self . _client . _request ( 'PUT' , url , json = { 'options' : new_options } ) if response . status_code != 200 : # pragma: no cover raise APIError ( "Could not update property value. Response: {}" . format ( str ( response ) ) ) else : self . _options = new_options
Save the options to KE - chain .
176
8
4,383
def make_form_or_formset_fields_not_required(form_or_formset):
    """Mark every field optional on a Form, or on each form of a FormSet."""
    # A FormSet iterates its member forms; wrap a single form in a list
    # so both cases share one loop.
    forms = form_or_formset if isinstance(form_or_formset, BaseFormSet) else [form_or_formset]
    for single_form in forms:
        make_form_fields_not_required(single_form)
Take a Form or FormSet and set all fields to not required .
85
14
4,384
def scope_id(self):
    """ID of the scope this Activity belongs to.

    Falls back to re-fetching the activity (id + scope fields only) when
    the scope is not cached locally, caching the result on success.

    :raises NotFoundError: when the activity has no scope even after the
        refetch.
    """
    if self.scope:
        scope_id = self.scope and self.scope.get('id')
    else:
        # Scope not cached: fetch a minimal copy of this activity.
        pseudo_self = self._client.activity(pk=self.id, fields="id,scope")
        if pseudo_self.scope and pseudo_self.scope.get('id'):
            self.scope = pseudo_self.scope  # cache for subsequent calls
            scope_id = self.scope.get('id')
        else:
            raise NotFoundError("This activity '{}'({}) does not belong to a scope, something is weird!".format(self.name, self.id))
    return scope_id
ID of the scope this Activity belongs to .
145
9
4,385
def is_rootlevel(self):
    """Determine if the Activity is at the root level of a project.

    :returns: True when the activity's container is the root container;
        False when it is nested deeper or has no container at all.
    """
    container_id = self._json_data.get('container')
    # No container means not root-level (matches previous behavior).
    return bool(container_id) and container_id == self._json_data.get('root_container')
Determine if Activity is at the root level of a project .
55
14
4,386
def is_configured(self):
    """Determine whether the Activity has at least one associated model part.

    :returns: True when any model part is associated, False otherwise.
    """
    # One associated model is enough; limit=1 keeps the query cheap.
    return bool(self.parts(category=Category.MODEL, limit=1))
Determine if the Activity is configured with input and output properties .
52
14
4,387
def parts(self, *args, **kwargs):
    """Retrieve parts belonging to this activity (delegates to the client)."""
    client = self._client
    return client.parts(*args, activity=self.id, **kwargs)
Retrieve parts belonging to this activity .
38
8
4,388
def associated_parts(self, *args, **kwargs):
    """Return a (models, instances) pair of parts tied to this activity."""
    models = self.parts(category=Category.MODEL, *args, **kwargs)
    instances = self.parts(category=Category.INSTANCE, *args, **kwargs)
    return models, instances
Retrieve models and instances belonging to this activity .
61
10
4,389
def subprocess(self):
    """Retrieve the subprocess (container activity) this task lives in.

    :raises NotFoundError: for top-level tasks, which have no subprocess.
    """
    container = self._json_data.get('container')
    if container == self._json_data.get('root_container'):
        raise NotFoundError(
            "Cannot find subprocess for this task '{}', "
            "as this task exist on top level.".format(self.name))
    return self._client.activity(pk=container, scope=self.scope_id)
Retrieve the subprocess in which this activity is defined .
107
12
4,390
def siblings(self, **kwargs):
    """Fetch all activities that share this activity's container."""
    parent = self._json_data.get('container')
    return self._client.activities(container=parent, scope=self.scope_id, **kwargs)
Retrieve the other activities that also belong to the subprocess .
57
13
4,391
def create(self, *args, **kwargs):
    """Create a child activity; only allowed on subprocess activities.

    :raises IllegalArgumentError: when this activity is not a subprocess.
    """
    if self.activity_type == ActivityType.SUBPROCESS:
        return self._client.create_activity(self.id, *args, **kwargs)
    raise IllegalArgumentError("One can only create a task under a subprocess.")
Create a new activity belonging to this subprocess .
72
10
4,392
def customization ( self ) : from . customization import ExtCustomization # For now, we only allow customization in an Ext JS context return ExtCustomization ( activity = self , client = self . _client )
Get a customization object representing the customization of the activity .
42
11
4,393
def all_stop_places_quays(self) -> list:
    """Return a combined list of all stop place ids and quay ids.

    The result is a fresh list; neither ``self.stops`` nor
    ``self.quays`` is modified.
    """
    # Concatenation replaces the previous copy-then-append loop.
    return self.stops + self.quays
Get all stop places and quays
48
7
4,394
async def expand_all_quays(self) -> None:
    """Find all quays belonging to the configured stop places.

    Queries the Entur GraphQL API and appends to ``self.quays`` the id
    of every quay (with at least one estimated call) of each stop place
    that has more than one quay.  Silently returns on HTTP or API errors.
    """
    if not self.stops:
        return
    headers = {'ET-Client-Name': self._client_name}
    request = {
        'query': GRAPHQL_STOP_TO_QUAY_TEMPLATE,
        'variables': {
            'stops': self.stops,
            'omitNonBoarding': self.omit_non_boarding
        }
    }
    with async_timeout.timeout(10):
        resp = await self.web_session.post(RESOURCE, json=request, headers=headers)
    if resp.status != 200:
        _LOGGER.error("Error connecting to Entur, response http status code: %s", resp.status)
        return None
    result = await resp.json()
    if 'errors' in result:
        # API-level errors: leave self.quays untouched.
        return
    for stop_place in result['data']['stopPlaces']:
        # Single-quay stop places are not expanded.
        if len(stop_place['quays']) > 1:
            for quay in stop_place['quays']:
                if quay['estimatedCalls']:
                    self.quays.append(quay['id'])
Find all quays from stop places .
255
8
4,395
async def update(self) -> None:
    """Get the latest departure data from api.entur.org.

    Sends one GraphQL query covering the configured stop places and
    quays and feeds every returned place through ``_process_place``.
    On HTTP or API errors the previously stored data is left untouched.
    """
    headers = {'ET-Client-Name': self._client_name}
    request = {
        'query': self.get_gql_query(),
        'variables': {
            'stops': self.stops,
            'quays': self.quays,
            'whitelist': {'lines': self.line_whitelist},
            'numberOfDepartures': self.number_of_departures,
            'omitNonBoarding': self.omit_non_boarding
        }
    }
    with async_timeout.timeout(10):
        resp = await self.web_session.post(RESOURCE, json=request, headers=headers)
    if resp.status != 200:
        _LOGGER.error("Error connecting to Entur, response http status code: %s", resp.status)
        return None
    result = await resp.json()
    if 'errors' in result:
        # Bug fix: logging uses lazy %-style arguments, so the previous
        # "{error}" str.format placeholder was never interpolated.
        _LOGGER.warning("Entur API responded with error message: %s", result['errors'])
        return
    self._data = result['data']
    if 'stopPlaces' in self._data:
        for stop in self._data['stopPlaces']:
            self._process_place(stop, False)
    if 'quays' in self._data:
        for quay in self._data['quays']:
            self._process_place(quay, True)
Get the latest data from api . entur . org .
322
12
4,396
def _process_place(self, place: dict, is_platform: bool) -> None:
    """Store a Place built from *place* under its id in self.info."""
    self.info[place['id']] = Place(place, is_platform)
Extract information from place dictionary .
48
7
4,397
def serializable_list(olist, attrs_to_serialize=None, rels_to_expand=None,
                      group_listrels_by=None, rels_to_serialize=None,
                      key_modifications=None, groupby=None,
                      keyvals_to_merge=None, preserve_order=False,
                      dict_struct=None, dict_post_processors=None):
    """Convert a list of model instances to dicts via their todict method.

    When *groupby* is given the instances are deep-grouped by those keys
    (JSON-encoded when *preserve_order* is set); otherwise each instance
    is serialized individually, optionally merging in the matching dict
    from *keyvals_to_merge*.

    NOTE(review): without groupby/keyvals_to_merge the return value is a
    lazy ``map`` object, not a list — presumably consumed once by the
    caller; confirm before materializing.
    """
    if groupby:
        if preserve_order:
            # json_encoder round-trip keeps the group ordering stable.
            result = json_encoder(deep_group(
                olist, keys=groupby, serializer='todict',
                preserve_order=preserve_order,
                serializer_kwargs={
                    'rels_to_serialize': rels_to_serialize,
                    'rels_to_expand': rels_to_expand,
                    'attrs_to_serialize': attrs_to_serialize,
                    'group_listrels_by': group_listrels_by,
                    'key_modifications': key_modifications,
                    'dict_struct': dict_struct,
                    'dict_post_processors': dict_post_processors}))
        else:
            result = deep_group(
                olist, keys=groupby, serializer='todict',
                preserve_order=preserve_order,
                serializer_kwargs={
                    'rels_to_serialize': rels_to_serialize,
                    'rels_to_expand': rels_to_expand,
                    'attrs_to_serialize': attrs_to_serialize,
                    'group_listrels_by': group_listrels_by,
                    'key_modifications': key_modifications,
                    'dict_struct': dict_struct,
                    'dict_post_processors': dict_post_processors})
        return result
    else:
        # Serialize each object with the same parameter set.
        result_list = map(
            lambda o: serialized_obj(
                o, attrs_to_serialize=attrs_to_serialize,
                rels_to_expand=rels_to_expand,
                group_listrels_by=group_listrels_by,
                rels_to_serialize=rels_to_serialize,
                key_modifications=key_modifications,
                dict_struct=dict_struct,
                dict_post_processors=dict_post_processors),
            olist)
        if keyvals_to_merge:
            # Pairwise merge extra key/value dicts into each serialized obj.
            result_list = [merge(obj_dict, kvdict)
                           for obj_dict, kvdict in
                           zip(result_list, keyvals_to_merge)]
        return result_list
Converts a list of model instances to a list of dictionaries using their todict method .
580
19
4,398
def jsoned(struct, wrap=True, meta=None, struct_key='result',
           pre_render_callback=None):
    """Provide a JSON dump of *struct*, wrapped via ``structured``."""
    payload = structured(
        struct, wrap=wrap, meta=meta, struct_key=struct_key,
        pre_render_callback=pre_render_callback)
    return _json.dumps(payload, default=json_encoder)
Provides a json dump of the struct
77
8
4,399
def as_list(func):
    """Decorator returning a JSON response of a list of model objects.

    The decorated view must return a list of model instances (or a
    ready-made Response, which is passed through untouched); the list is
    serialized using parameters taken from the request's query string.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        response = func(*args, **kwargs)
        if isinstance(response, Response):
            # Already a full response (e.g. an error) — pass it through.
            return response
        return as_json_list(
            response,
            **_serializable_params(request.args, check_groupby=True))
    return wrapper
A decorator used to return a JSON response of a list of model objects . It expects the decorated function to return a list of model instances . It then converts the instances to dicts and serializes them into a json response
81
45