idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
243,200
def get_context_data(self, **kwargs):
    """Extend the parent context with the configurable terms base template.

    Reads ``settings.TERMS_BASE_TEMPLATE`` and falls back to
    ``DEFAULT_TERMS_BASE_TEMPLATE`` when the setting is absent.
    """
    base_template = getattr(settings, 'TERMS_BASE_TEMPLATE',
                            DEFAULT_TERMS_BASE_TEMPLATE)
    context = super(TermsView, self).get_context_data(**kwargs)
    context['terms_base_template'] = base_template
    return context
Pass additional context data
243,201
def get_initial(self):
    """Seed the accept-terms form: the T&C to accept plus the post-accept
    redirect target taken from the ``returnTo`` querystring parameter
    (defaults to '/')."""
    LOGGER.debug('termsandconditions.views.AcceptTermsView.get_initial')
    return {
        'terms': self.get_terms(self.kwargs),
        'returnTo': self.request.GET.get('returnTo', '/'),
    }
Override of CreateView method queries for which T&C to accept and catches returnTo from URL
243,202
def post ( self , request , * args , ** kwargs ) : return_url = request . POST . get ( 'returnTo' , '/' ) terms_ids = request . POST . getlist ( 'terms' ) if not terms_ids : return HttpResponseRedirect ( return_url ) if DJANGO_VERSION <= ( 2 , 0 , 0 ) : user_authenticated = request . user . is_authenticated ( ) else : ...
Handles POST request .
243,203
def form_valid ( self , form ) : LOGGER . debug ( 'termsandconditions.views.EmailTermsView.form_valid' ) template = get_template ( "termsandconditions/tc_email_terms.html" ) template_rendered = template . render ( { "terms" : form . cleaned_data . get ( 'terms' ) } ) LOGGER . debug ( "Email Terms Body:" ) LOGGER . debu...
Override of CreateView method sends the email .
243,204
def form_invalid(self, form):
    """Log the invalid email submission, flash an error message to the
    user, and defer to the parent class's invalid-form handling."""
    LOGGER.debug("Invalid Email Form Submitted")
    messages.add_message(
        self.request,
        messages.ERROR,
        _("Invalid Email Address."),
    )
    return super(EmailTermsView, self).form_invalid(form)
Override of CreateView method logs invalid email form submissions .
243,205
def terms_required ( view_func ) : @ wraps ( view_func , assigned = available_attrs ( view_func ) ) def _wrapped_view ( request , * args , ** kwargs ) : if DJANGO_VERSION <= ( 2 , 0 , 0 ) : user_authenticated = request . user . is_authenticated ( ) else : user_authenticated = request . user . is_authenticated if not us...
This decorator checks to see if the user is logged in and if so if they have accepted the site terms .
243,206
def get_active ( slug = DEFAULT_TERMS_SLUG ) : active_terms = cache . get ( 'tandc.active_terms_' + slug ) if active_terms is None : try : active_terms = TermsAndConditions . objects . filter ( date_active__isnull = False , date_active__lte = timezone . now ( ) , slug = slug ) . latest ( 'date_active' ) cache . set ( '...
Finds the latest of a particular terms and conditions
243,207
def get_active_terms_ids ( ) : active_terms_ids = cache . get ( 'tandc.active_terms_ids' ) if active_terms_ids is None : active_terms_dict = { } active_terms_ids = [ ] active_terms_set = TermsAndConditions . objects . filter ( date_active__isnull = False , date_active__lte = timezone . now ( ) ) . order_by ( 'date_acti...
Returns a list of the IDs of all active terms and conditions
243,208
def get_active_terms_list ( ) : active_terms_list = cache . get ( 'tandc.active_terms_list' ) if active_terms_list is None : active_terms_list = TermsAndConditions . objects . filter ( id__in = TermsAndConditions . get_active_terms_ids ( ) ) . order_by ( 'slug' ) cache . set ( 'tandc.active_terms_list' , active_terms_l...
Returns all the latest active terms and conditions
243,209
def get_active_terms_not_agreed_to ( user ) : if TERMS_EXCLUDE_USERS_WITH_PERM is not None : if user . has_perm ( TERMS_EXCLUDE_USERS_WITH_PERM ) and not user . is_superuser : return [ ] not_agreed_terms = cache . get ( 'tandc.not_agreed_terms_' + user . get_username ( ) ) if not_agreed_terms is None : try : LOGGER . d...
Checks to see if a specified user has agreed to all the latest terms and conditions
243,210
def show_terms_if_not_agreed ( context , field = TERMS_HTTP_PATH_FIELD ) : request = context [ 'request' ] url = urlparse ( request . META [ field ] ) not_agreed_terms = TermsAndConditions . get_active_terms_not_agreed_to ( request . user ) if not_agreed_terms and is_path_protected ( url . path ) : return { 'not_agreed...
Displays a modal on a current page if a user has not yet agreed to the given terms . If terms are not specified the default slug is used .
243,211
def user_accept_terms(backend, user, uid, social_user=None, *args, **kwargs):
    """Social-auth pipeline step: after user creation, redirect to the
    terms-accept page unless the user already agreed to all active T&C."""
    LOGGER.debug('user_accept_terms')
    if not TermsAndConditions.get_active_terms_not_agreed_to(user):
        return {'social_user': social_user, 'user': user}
    return redirect_to_terms_accept('/')
Check if the user has accepted the terms and conditions after creation .
243,212
def redirect_to_terms_accept ( current_path = '/' , slug = 'default' ) : redirect_url_parts = list ( urlparse ( ACCEPT_TERMS_PATH ) ) if slug != 'default' : redirect_url_parts [ 2 ] += slug querystring = QueryDict ( redirect_url_parts [ 4 ] , mutable = True ) querystring [ TERMS_RETURNTO_PARAM ] = current_path redirect...
Redirect the user to the terms and conditions accept page .
243,213
def user_terms_updated(sender, **kwargs):
    """Signal handler: drop the per-user "not agreed" cache entry whenever
    a user's terms-and-conditions record changes."""
    LOGGER.debug("User T&C Updated Signal Handler")
    instance = kwargs.get('instance')
    if instance.user:
        cache.delete('tandc.not_agreed_terms_' + instance.user.get_username())
Called when user terms and conditions is changed - to force cache clearing
243,214
def terms_updated ( sender , ** kwargs ) : LOGGER . debug ( "T&C Updated Signal Handler" ) cache . delete ( 'tandc.active_terms_ids' ) cache . delete ( 'tandc.active_terms_list' ) if kwargs . get ( 'instance' ) . slug : cache . delete ( 'tandc.active_terms_' + kwargs . get ( 'instance' ) . slug ) for utandc in UserTerm...
Called when terms and conditions is changed - to force cache clearing
243,215
def paginate ( parser , token , paginator_class = None ) : try : tag_name , tag_args = token . contents . split ( None , 1 ) except ValueError : msg = '%r tag requires arguments' % token . contents . split ( ) [ 0 ] raise template . TemplateSyntaxError ( msg ) match = PAGINATE_EXPRESSION . match ( tag_args ) if match i...
Paginate objects .
243,216
def get_pages ( parser , token ) : try : tag_name , args = token . contents . split ( None , 1 ) except ValueError : var_name = 'pages' else : args = args . split ( ) if len ( args ) == 2 and args [ 0 ] == 'as' : var_name = args [ 1 ] else : msg = 'Invalid arguments for %r tag' % tag_name raise template . TemplateSynta...
Add to context the list of page links .
243,217
def show_pages(parser, token):
    """Template-tag entry point rendering the page links.

    The tag accepts no arguments; anything else is a syntax error.
    """
    tokens = token.contents.split()
    if len(tokens) != 1:
        raise template.TemplateSyntaxError(
            '%r tag takes no arguments' % tokens[0])
    return ShowPagesNode()
Show page links .
243,218
def show_current_number ( parser , token ) : try : tag_name , args = token . contents . split ( None , 1 ) except ValueError : key = None number = None tag_name = token . contents [ 0 ] var_name = None else : match = SHOW_CURRENT_NUMBER_EXPRESSION . match ( args ) if match is None : msg = 'Invalid arguments for %r tag'...
Show the current page number or insert it in the context .
243,219
def page_template ( template , key = PAGE_LABEL ) : def decorator ( view ) : @ wraps ( view ) def decorated ( request , * args , ** kwargs ) : extra_context = kwargs . setdefault ( 'extra_context' , { } ) extra_context [ 'page_template' ] = template querystring_key = request . GET . get ( QS_KEY , request . POST . get ...
Return a view dynamically switching template if the request is Ajax .
243,220
def _get_template ( querystring_key , mapping ) : default = None try : template_and_keys = mapping . items ( ) except AttributeError : template_and_keys = mapping for template , key in template_and_keys : if key is None : key = PAGE_LABEL default = template if key == querystring_key : return template return default
Return the template corresponding to the given querystring_key .
243,221
def get_queryset ( self ) : if self . queryset is not None : queryset = self . queryset if hasattr ( queryset , '_clone' ) : queryset = queryset . _clone ( ) elif self . model is not None : queryset = self . model . _default_manager . all ( ) else : msg = '{0} must define ``queryset`` or ``model``' raise ImproperlyConf...
Get the list of items for this view .
243,222
def get_context_object_name(self, object_list):
    """Pick the context variable name for *object_list*.

    Prefer an explicit ``context_object_name``; otherwise derive
    ``<model>_list`` from the queryset's model, or return ``None`` when
    *object_list* carries no model.
    """
    if self.context_object_name:
        return self.context_object_name
    if hasattr(object_list, 'model'):
        model_name = object_list.model._meta.object_name.lower()
        return smart_str('{0}_list'.format(model_name))
    return None
Get the name of the item to be used in the context .
243,223
def get_page_template(self, **kwargs):
    """Build the per-page template name from the model's app label,
    lower-cased object name, and the view's two template suffixes."""
    meta = self.object_list.model._meta
    parts = (meta.app_label, meta.object_name.lower(),
             self.template_name_suffix, self.page_template_suffix)
    return '{0}/{1}{2}{3}.html'.format(*parts)
Return the template name used for this request .
243,224
def render_link ( self ) : extra_context = { 'add_nofollow' : settings . ADD_NOFOLLOW , 'page' : self , 'querystring_key' : self . querystring_key , } if self . is_current : template_name = 'el_pagination/current_link.html' else : template_name = 'el_pagination/page_link.html' if settings . USE_NEXT_PREVIOUS_LINKS : if...
Render the page as a link .
243,225
def previous(self):
    """Return the rendered link to the previous page, or '' when this is
    already the first page."""
    if not self._page.has_previous():
        return ''
    return self._endless_page(
        self._page.previous_page_number(), label=settings.PREVIOUS_LABEL)
Return the previous page .
243,226
def next(self):
    """Return the rendered link to the next page, or '' when this is
    already the last page."""
    if not self._page.has_next():
        return ''
    return self._endless_page(
        self._page.next_page_number(), label=settings.NEXT_LABEL)
Return the next page .
243,227
def start_index(self):
    """Return the 1-based index of the first item on this page.

    The first page may hold a different number of items
    (``paginator.first_page``) than the remaining, ``per_page``-sized pages.
    """
    paginator = self.paginator
    if paginator.count == 0:
        return 0
    if self.number == 1:
        return 1
    full_pages_before = self.number - 2  # regular-size pages before this one
    return full_pages_before * paginator.per_page + paginator.first_page + 1
Return the 1 - based index of the first item on this page .
243,228
def end_index(self):
    """Return the 1-based index of the last item on this page.

    On the final page this is simply the total item count; otherwise it
    accounts for the possibly different size of the first page.
    """
    paginator = self.paginator
    if self.number == paginator.num_pages:
        return paginator.count
    pages_done = self.number - 1
    return pages_done * paginator.per_page + paginator.first_page
Return the 1 - based index of the last item on this page .
243,229
def get_page_numbers ( current_page , num_pages , extremes = DEFAULT_CALLABLE_EXTREMES , arounds = DEFAULT_CALLABLE_AROUNDS , arrows = DEFAULT_CALLABLE_ARROWS ) : page_range = range ( 1 , num_pages + 1 ) pages = [ ] if current_page != 1 : if arrows : pages . append ( 'first' ) pages . append ( 'previous' ) first = page...
Default callable for page listing .
243,230
def _make_elastic_range ( begin , end ) : starting_factor = max ( 1 , ( end - begin ) // 100 ) factor = _iter_factors ( starting_factor ) left_half , right_half = [ ] , [ ] left_val , right_val = begin , end right_val = end while left_val < right_val : left_half . append ( left_val ) right_half . append ( right_val ) n...
Generate an S - curved range of pages .
243,231
def get_elastic_page_numbers ( current_page , num_pages ) : if num_pages <= 10 : return list ( range ( 1 , num_pages + 1 ) ) if current_page == 1 : pages = [ 1 ] else : pages = [ 'first' , 'previous' ] pages . extend ( _make_elastic_range ( 1 , current_page ) ) if current_page != num_pages : pages . extend ( _make_elas...
Alternative callable for page listing .
243,232
def get_prepopulated_value(field, instance):
    """Return the raw value to base the slug on, from ``field.populate_from``.

    ``populate_from`` may be a callable (called with *instance*) or the
    name of an instance attribute; if that attribute is itself callable it
    is invoked with no arguments.
    """
    if callable(field.populate_from):
        return field.populate_from(instance)
    attr = getattr(instance, field.populate_from)
    # Bug fix: the old ``callable(attr) and attr() or attr`` boolean trap
    # returned the bound method itself whenever attr() was falsy ('' / 0).
    return attr() if callable(attr) else attr
Returns preliminary value based on populate_from .
243,233
def get_uniqueness_lookups ( field , instance , unique_with ) : for original_lookup_name in unique_with : if '__' in original_lookup_name : field_name , inner_lookup = original_lookup_name . split ( '__' , 1 ) else : field_name , inner_lookup = original_lookup_name , None try : other_field = instance . _meta . get_fiel...
Returns a dict-able tuple of lookups to ensure uniqueness of a slug .
243,234
def derivative_colors(colors):
    """Return the names of all valid color variants of the base *colors*:
    the 'on_', 'bright_' and 'on_bright_' prefixed forms."""
    prefixes = ('on_', 'bright_', 'on_bright_')
    return {prefix + color for color in colors for prefix in prefixes}
Return the names of valid color variants given the base colors .
243,235
def split_into_formatters(compound):
    """Split a possibly compound format string into formatter segments.

    Underscore-separated parts are re-merged onto a preceding mergeable
    prefix ('no', 'on', 'bright', 'on_bright'), so e.g. 'red_on_green'
    yields ['red', 'on_green'].
    """
    segments = []
    mergeable = ('no', 'on', 'bright', 'on_bright')
    for part in compound.split('_'):
        if segments and segments[-1] in mergeable:
            segments[-1] = segments[-1] + '_' + part
        else:
            segments.append(part)
    return segments
Split a possibly compound format string into segments .
243,236
def location ( self , x = None , y = None ) : self . stream . write ( self . save ) if x is not None and y is not None : self . stream . write ( self . move ( y , x ) ) elif x is not None : self . stream . write ( self . move_x ( x ) ) elif y is not None : self . stream . write ( self . move_y ( y ) ) try : yield final...
Return a context manager for temporarily moving the cursor .
243,237
def fullscreen(self):
    """Context-manager body: enter the terminal's alternate (fullscreen)
    screen, yield control, and always restore the normal screen on exit."""
    self.stream.write(self.enter_fullscreen)
    try:
        yield
    finally:
        self.stream.write(self.exit_fullscreen)
Return a context manager that enters fullscreen mode while inside it and restores normal mode on leaving .
243,238
def hidden_cursor(self):
    """Context-manager body: hide the terminal cursor, yield control, and
    always make the cursor visible again on exit."""
    self.stream.write(self.hide_cursor)
    try:
        yield
    finally:
        self.stream.write(self.normal_cursor)
Return a context manager that hides the cursor while inside it and makes it visible on leaving .
243,239
def _resolve_formatter ( self , attr ) : if attr in COLORS : return self . _resolve_color ( attr ) elif attr in COMPOUNDABLES : return self . _formatting_string ( self . _resolve_capability ( attr ) ) else : formatters = split_into_formatters ( attr ) if all ( f in COMPOUNDABLES for f in formatters ) : return self . _f...
Resolve a sugary or plain capability name color or compound formatting function name into a callable capability .
243,240
def _resolve_capability(self, atom):
    """Look up the terminal capability code for *atom* (or its sugary
    alias in ``self._sugar``).

    Returns the code decoded as latin1, or an empty unicode string when
    the terminal lacks the capability.
    """
    capname = self._sugar.get(atom, atom)
    code = tigetstr(capname)
    return code.decode('latin1') if code else u''
Return a terminal code for a capname or a sugary name or an empty Unicode .
243,241
def _resolve_color ( self , color ) : color_cap = ( self . _background_color if 'on_' in color else self . _foreground_color ) offset = 8 if 'bright_' in color else 0 base_color = color . rsplit ( '_' , 1 ) [ - 1 ] return self . _formatting_string ( color_cap ( getattr ( curses , 'COLOR_' + base_color . upper ( ) ) + o...
Resolve a color like red or on_bright_green into a callable capability .
243,242
def send_login_code ( self , code , context , ** kwargs ) : from_number = self . from_number or getattr ( settings , 'DEFAULT_FROM_NUMBER' ) sms_content = render_to_string ( self . template_name , context ) self . twilio_client . messages . create ( to = code . user . phone_number , from_ = from_number , body = sms_con...
Send a login code via SMS
243,243
def load(filename, **kwargs):
    """Read and return the object stored in the given t7 file.

    Keyword arguments are forwarded to ``T7Reader``.
    """
    with open(filename, 'rb') as handle:
        return T7Reader(handle, **kwargs).read_obj()
Loads the given t7 file using default settings ; kwargs are forwarded to T7Reader .
243,244
def check(self):
    """Return True if at least ``self.interval`` seconds have elapsed
    since ``self.lastrun``.

    Idiom fix: return the comparison directly instead of the
    ``if cond: return True else: return False`` anti-pattern.
    """
    return self.lastrun + self.interval < time.time()
Returns True if interval seconds have passed since it last ran
243,245
def make_union ( * transformers , ** kwargs ) : n_jobs = kwargs . pop ( 'n_jobs' , 1 ) concatenate = kwargs . pop ( 'concatenate' , True ) if kwargs : raise TypeError ( 'Unknown keyword arguments: "{}"' . format ( list ( kwargs . keys ( ) ) [ 0 ] ) ) return FeatureUnion ( _name_estimators ( transformers ) , n_jobs = n_...
Construct a FeatureUnion from the given transformers .
243,246
def get_feature_names ( self ) : feature_names = [ ] for name , trans , weight in self . _iter ( ) : if not hasattr ( trans , 'get_feature_names' ) : raise AttributeError ( "Transformer %s (type %s) does not " "provide get_feature_names." % ( str ( name ) , type ( trans ) . __name__ ) ) feature_names . extend ( [ name ...
Get feature names from all transformers .
243,247
def fit ( self , X , y = None ) : self . transformer_list = list ( self . transformer_list ) self . _validate_transformers ( ) with Pool ( self . n_jobs ) as pool : transformers = pool . starmap ( _fit_one_transformer , ( ( trans , X [ trans [ 'col_pick' ] ] if hasattr ( trans , 'col_pick' ) else X , y ) for _ , trans ...
Fit all transformers using X .
243,248
def fit_transform ( self , X , y = None , ** fit_params ) : self . _validate_transformers ( ) with Pool ( self . n_jobs ) as pool : result = pool . starmap ( _fit_transform_one , ( ( trans , weight , X [ trans [ 'col_pick' ] ] if hasattr ( trans , 'col_pick' ) else X , y ) for name , trans , weight in self . _iter ( ) ...
Fit all transformers transform the data and concatenate results .
243,249
def transform ( self , X ) : with Pool ( self . n_jobs ) as pool : Xs = pool . starmap ( _transform_one , ( ( trans , weight , X [ trans [ 'col_pick' ] ] if hasattr ( trans , 'col_pick' ) else X ) for name , trans , weight in self . _iter ( ) ) ) if not Xs : return np . zeros ( ( X . shape [ 0 ] , 0 ) ) if self . conca...
Transform X separately by each transformer concatenate results .
243,250
def split_batches ( self , data , minibatch_size = None ) : if minibatch_size == None : minibatch_size = self . minibatch_size if isinstance ( data , list ) or isinstance ( data , tuple ) : len_data = len ( data ) else : len_data = data . shape [ 0 ] if isinstance ( data , pd . DataFrame ) : data_split = [ data . iloc ...
Split data into minibatches with a specified size
243,251
def merge_batches(self, data):
    """Merge a list of minibatches back into one data instance.

    Sparse CSR batches are vstacked, pandas objects are concatenated,
    and any other (list-like) batches are flattened into a single list.
    """
    first = data[0]
    if isinstance(first, ssp.csr_matrix):
        return ssp.vstack(data)
    if isinstance(first, (pd.DataFrame, pd.Series)):
        return pd.concat(data)
    return [item for batch in data for item in batch]
Merge a list of data minibatches into one single instance representing the data
243,252
def shuffle_batch ( self , texts , labels = None , seed = None ) : if seed != None : random . seed ( seed ) index_shuf = list ( range ( len ( texts ) ) ) random . shuffle ( index_shuf ) texts = [ texts [ x ] for x in index_shuf ] if labels == None : return texts labels = [ labels [ x ] for x in index_shuf ] return text...
Shuffle a list of samples as well as the labels if specified
243,253
def demeshgrid(arr):
    """Invert a meshgrid: recover the 1-D coordinate array from *arr*.

    Finds the axis along which *arr* actually varies (its values are
    identical across every other axis, checked by comparing the lines at
    index 0 and index 1) and returns one line taken along that axis.
    Returns None when no such axis is found.
    """
    ndim = len(arr.shape)
    for axis in range(ndim):
        line_at_0 = [0] * ndim
        line_at_1 = [1] * ndim
        line_at_0[axis] = slice(None)
        line_at_1[axis] = slice(None)
        if (arr[tuple(line_at_0)] == arr[tuple(line_at_1)]).all():
            return arr[tuple(line_at_0)]
Turns an ndarray created by a meshgrid back into a 1D array
243,254
def timeline_slider ( self , text = 'Time' , ax = None , valfmt = None , color = None ) : if ax is None : adjust_plot = { 'bottom' : .2 } rect = [ .18 , .05 , .5 , .03 ] plt . subplots_adjust ( ** adjust_plot ) self . slider_ax = plt . axes ( rect ) else : self . slider_ax = ax if valfmt is None : if ( np . issubdtype ...
Creates a timeline slider .
243,255
def controls(self, timeline_slider_args=None, toggle_args=None):
    """Create the interactive controls for the animation: the timeline
    slider and the play/pause toggle.

    Bug fix: the defaults were mutable dicts (``{}``), shared across every
    call; ``None`` sentinels preserve the old call signature safely.
    """
    self.timeline_slider(**(timeline_slider_args or {}))
    self.toggle(**(toggle_args or {}))
Creates interactive controls for the animation
243,256
def save_gif(self, filename):
    """Save the animation as ``<filename>.gif`` via Pillow.

    NOTE(review): the timeline index is decremented first — presumably to
    offset an extra frame advance during export; confirm before changing.
    """
    self.timeline.index -= 1
    writer = PillowWriter(fps=self.timeline.fps)
    self.animation.save(filename + '.gif', writer=writer)
Saves the animation to a gif
243,257
def save(self, *args, **kwargs):
    """Save the animation, forwarding all arguments to
    ``matplotlib``'s ``animation.save``.

    NOTE(review): the timeline index is decremented first — presumably to
    offset an extra frame advance during export; confirm before changing.
    """
    self.timeline.index -= 1
    self.animation.save(*args, **kwargs)
Saves an animation
243,258
def isin_alone(elems, line):
    """Return True if *line* (stripped, case-insensitive) is exactly one
    of *elems*.

    Idiom fix: replace the manual found-flag loop with ``any()`` and
    hoist the invariant strip/lower out of the loop.
    """
    stripped = line.strip().lower()
    return any(stripped == e.lower() for e in elems)
Check if an element from a list is the only element of a string .
243,259
def isin_start(elems, line):
    """Return True if *line* (lstripped, lowercased) starts with one of
    *elems*.  *elems* may be a single string or a list of strings.

    NOTE(review): unlike ``isin_alone``, the elements themselves are not
    lowercased here — callers appear to pass lowercase keys; confirm
    before changing that.
    """
    if type(elems) is not list:
        elems = [elems]
    candidate = line.lstrip().lower()
    return any(candidate.startswith(e) for e in elems)
Check if an element from a list starts a string .
243,260
def isin(elems, line):
    """Return True if any element of *elems* occurs in *line*
    (the line is lowercased; the elements are used as given).

    Idiom fix: ``any()`` instead of a manual found-flag loop, with the
    lowering hoisted out of the loop.
    """
    lowered = line.lower()
    return any(e in lowered for e in elems)
Check if an element from a list is in a string .
243,261
def get_leading_spaces(data):
    """Return the leading whitespace of *data* ('' when there is none).

    Simplification: ``r'^(\s*)'`` matches every string (including the
    empty one), so the old ``if m`` guard was dead code.
    """
    return re.match(r'^(\s*)', data).group(1)
Get the leading space of a string if it is not empty
243,262
def get_mandatory_sections(self):
    """Return the option keys that are neither optional nor excluded."""
    skip = set(self.optional_sections) | set(self.excluded_sections)
    return [section for section in self.opt if section not in skip]
Get mandatory sections
243,263
def get_raw_not_managed ( self , data ) : keys = [ 'also' , 'ref' , 'note' , 'other' , 'example' , 'method' , 'attr' ] elems = [ self . opt [ k ] for k in self . opt if k in keys ] data = data . splitlines ( ) start = 0 init = 0 raw = '' spaces = None while start != - 1 : start , end = self . get_next_section_lines ( d...
Get elements not managed . They can be used as is .
243,264
def get_key_section_header(self, key, spaces):
    """Return the numpydoc section header for *key*: the parent's header
    text, indented and underlined with a row of dashes."""
    title = super(NumpydocTools, self).get_key_section_header(key, spaces)
    underline = '-' * len(title)
    return '{0}{1}\n{0}{2}\n'.format(spaces, title, underline)
Get the key of the header section
243,265
def autodetect_style ( self , data ) : found_keys = defaultdict ( int ) for style in self . tagstyles : for key in self . opt : found_keys [ style ] += data . count ( self . opt [ key ] [ style ] [ 'name' ] ) fkey = max ( found_keys , key = found_keys . get ) detected_style = fkey if found_keys [ fkey ] else 'unknown' ...
Determine the style of a docstring and sets it as the default input one for the instance .
243,266
def _get_options ( self , style ) : return [ self . opt [ o ] [ style ] [ 'name' ] for o in self . opt ]
Get the list of keywords for a particular style
243,267
def get_group_key_line(self, data, key):
    """Return the line number in *data* of a line starting with one of the
    group-style markers for *key*, or -1 when none is found.

    NOTE(review): the loop never breaks, so every match overwrites idx and
    the LAST occurrence wins, despite the "next" wording used by sibling
    helpers — confirm whether that is intended.
    """
    idx = -1
    for i, line in enumerate(data.splitlines()):
        if isin_start(self.groups[key], line):
            idx = i
    return idx
Get the next group-style key's line number .
243,268
def get_group_key_index(self, data, key):
    """Return the character index in *data* where the group-style section
    for *key* starts, or -1 when the key is absent."""
    line_no = self.get_group_key_line(data, key)
    if line_no == -1:
        return -1
    preceding = data.splitlines()[:line_no]
    # each preceding line contributes its length plus its newline
    return sum(len(line) + 1 for line in preceding)
Get the next group-style key's starting line index .
243,269
def get_group_line(self, data):
    """Return the smallest line number at which any group-style key starts
    in *data*, or -1 when none is present."""
    best = -1
    for key in self.groups:
        line_no = self.get_group_key_line(data, key)
        if line_no != -1 and (best == -1 or line_no < best):
            best = line_no
    return best
Get the next group-style key's line .
243,270
def get_group_index(self, data):
    """Return the character index where the first group-style section
    starts in *data*, or -1 when there is none."""
    line_no = self.get_group_line(data)
    if line_no == -1:
        return -1
    preceding = data.splitlines()[:line_no]
    # each preceding line contributes its length plus its newline
    return sum(len(line) + 1 for line in preceding)
Get the next group-style section's starting line index .
243,271
def get_key_index ( self , data , key , starting = True ) : key = self . opt [ key ] [ self . style [ 'in' ] ] [ 'name' ] if key . startswith ( ':returns' ) : data = data . replace ( ':return:' , ':returns:' ) idx = len ( data ) ini = 0 loop = True if key in data : while loop : i = data . find ( key ) if i != - 1 : if ...
Get from a docstring the next option with a given key .
243,272
def _extract_docs_description ( self ) : data = '\n' . join ( [ d . rstrip ( ) . replace ( self . docs [ 'out' ] [ 'spaces' ] , '' , 1 ) for d in self . docs [ 'in' ] [ 'raw' ] . splitlines ( ) ] ) if self . dst . style [ 'in' ] == 'groups' : idx = self . dst . get_group_index ( data ) elif self . dst . style [ 'in' ] ...
Extract main description from docstring
243,273
def _extract_groupstyle_docs_params ( self ) : data = '\n' . join ( [ d . rstrip ( ) . replace ( self . docs [ 'out' ] [ 'spaces' ] , '' , 1 ) for d in self . docs [ 'in' ] [ 'raw' ] . splitlines ( ) ] ) idx = self . dst . get_group_key_line ( data , 'param' ) if idx >= 0 : data = data . splitlines ( ) [ idx + 1 : ] en...
Extract group style parameters
243,274
def _extract_docs_return ( self ) : if self . dst . style [ 'in' ] == 'numpydoc' : data = '\n' . join ( [ d . rstrip ( ) . replace ( self . docs [ 'out' ] [ 'spaces' ] , '' , 1 ) for d in self . docs [ 'in' ] [ 'raw' ] . splitlines ( ) ] ) self . docs [ 'in' ] [ 'return' ] = self . dst . numpydoc . get_return_list ( da...
Extract return description and type
243,275
def _extract_docs_other ( self ) : if self . dst . style [ 'in' ] == 'numpydoc' : data = '\n' . join ( [ d . rstrip ( ) . replace ( self . docs [ 'out' ] [ 'spaces' ] , '' , 1 ) for d in self . docs [ 'in' ] [ 'raw' ] . splitlines ( ) ] ) lst = self . dst . numpydoc . get_list_key ( data , 'also' ) lst = self . dst . n...
Extract other specific sections
243,276
def _set_desc ( self ) : if self . docs [ 'in' ] [ 'desc' ] : self . docs [ 'out' ] [ 'desc' ] = self . docs [ 'in' ] [ 'desc' ] else : self . docs [ 'out' ] [ 'desc' ] = ''
Sets the global description if any
243,277
def _set_params ( self ) : if self . docs [ 'in' ] [ 'params' ] : self . docs [ 'out' ] [ 'params' ] = list ( self . docs [ 'in' ] [ 'params' ] ) for e in self . element [ 'params' ] : if type ( e ) is tuple : param = e [ 0 ] else : param = e found = False for i , p in enumerate ( self . docs [ 'out' ] [ 'params' ] ) :...
Sets the parameters with types descriptions and default value if any
243,278
def _set_raises ( self ) : if self . docs [ 'in' ] [ 'raises' ] : if self . dst . style [ 'out' ] != 'numpydoc' or self . dst . style [ 'in' ] == 'numpydoc' or ( self . dst . style [ 'out' ] == 'numpydoc' and 'raise' not in self . dst . numpydoc . get_excluded_sections ( ) ) : self . docs [ 'out' ] [ 'raises' ] = list ...
Sets the raises and descriptions
243,279
def _set_return ( self ) : if type ( self . docs [ 'in' ] [ 'return' ] ) is list and self . dst . style [ 'out' ] not in [ 'groups' , 'numpydoc' , 'google' ] : lst = self . docs [ 'in' ] [ 'return' ] if lst : if lst [ 0 ] [ 0 ] is not None : self . docs [ 'out' ] [ 'return' ] = "%s-> %s" % ( lst [ 0 ] [ 0 ] , lst [ 0 ]...
Sets the return parameter with description and rtype if any
243,280
def _set_other ( self ) : if self . dst . style [ 'in' ] == 'numpydoc' : if self . docs [ 'in' ] [ 'raw' ] is not None : self . docs [ 'out' ] [ 'post' ] = self . dst . numpydoc . get_raw_not_managed ( self . docs [ 'in' ] [ 'raw' ] ) elif 'post' not in self . docs [ 'out' ] or self . docs [ 'out' ] [ 'post' ] is None ...
Sets other specific sections
243,281
def _set_raw ( self ) : sep = self . dst . get_sep ( target = 'out' ) sep = sep + ' ' if sep != ' ' else sep with_space = lambda s : '\n' . join ( [ self . docs [ 'out' ] [ 'spaces' ] + l if i > 0 else l for i , l in enumerate ( s . splitlines ( ) ) ] ) raw = self . docs [ 'out' ] [ 'spaces' ] + self . quotes desc = se...
Sets the output raw docstring
243,282
def generate_docs ( self ) : if self . dst . style [ 'out' ] == 'numpydoc' and self . dst . numpydoc . first_line is not None : self . first_line = self . dst . numpydoc . first_line self . _set_desc ( ) self . _set_params ( ) self . _set_return ( ) self . _set_raises ( ) self . _set_other ( ) self . _set_raw ( ) self ...
Generates the output docstring
243,283
def get_files_from_dir ( path , recursive = True , depth = 0 , file_ext = '.py' ) : file_list = [ ] if os . path . isfile ( path ) or path == '-' : return [ path ] if path [ - 1 ] != os . sep : path = path + os . sep for f in glob . glob ( path + "*" ) : if os . path . isdir ( f ) : if depth < MAX_DEPTH_RECUR : file_li...
Retrieve the list of files from a folder .
243,284
def get_config ( config_file ) : config = { } tobool = lambda s : True if s . lower ( ) == 'true' else False if config_file : try : f = open ( config_file , 'r' ) except : print ( "Unable to open configuration file '{0}'" . format ( config_file ) ) else : for line in f . readlines ( ) : if len ( line . strip ( ) ) : ke...
Get the configuration from a file .
243,285
def get_output_docs(self):
    """Return the formatted output docstrings, parsing the input first
    if that has not happened yet."""
    if not self.parsed:
        self._parse()
    return [entry['docs'].get_raw_docs() for entry in self.docs_list]
Return the output docstrings once formatted
243,286
def compute_before_after ( self ) : if not self . parsed : self . _parse ( ) list_from = self . input_lines list_to = [ ] last = 0 for e in self . docs_list : start , end = e [ 'location' ] if start <= 0 : start , end = - start , - end list_to . extend ( list_from [ last : start + 1 ] ) else : list_to . extend ( list_f...
Compute the list of lines before and after the proposed docstring changes .
243,287
def diff ( self , source_path = '' , target_path = '' , which = - 1 ) : list_from , list_to = self . compute_before_after ( ) if source_path . startswith ( os . sep ) : source_path = source_path [ 1 : ] if source_path and not source_path . endswith ( os . sep ) : source_path += os . sep if target_path . startswith ( os...
Build the diff between original docstring and proposed docstring .
243,288
def get_patch_lines(self, source_path, target_path):
    """Return the Pyment patch header followed by the diff between
    *source_path* and *target_path*."""
    header = "# Patch generated by Pyment v{0}\n\n".format(__version__)
    return [header] + self.diff(source_path, target_path)
Return the diff between source_path and target_path
243,289
def write_patch_file(self, patch_file, lines_to_write):
    """Write *lines_to_write* to the file named *patch_file*."""
    with open(patch_file, 'w') as patch:
        patch.writelines(lines_to_write)
Write lines_to_write to the file called patch_file .
243,290
def overwrite_source_file ( self , lines_to_write ) : tmp_filename = '{0}.writing' . format ( self . input_file ) ok = False try : with open ( tmp_filename , 'w' ) as fh : fh . writelines ( lines_to_write ) ok = True finally : if ok : if platform . system ( ) == 'Windows' : self . _windows_rename ( tmp_filename ) else ...
overwrite the file with line_to_write
243,291
def by_own_time_per_call(stat):
    """Sort key: exclusive elapsed time per call, descending.

    Falls back to total own time when the stat has no own hits, and breaks
    ties with the deep-time-per-call key.
    """
    return (-stat.own_time_per_call if stat.own_hits else -stat.own_time,
            by_deep_time_per_call(stat))
Sorting by exclusive elapsed time per call in descending order .
243,292
def result(self):
    """Return ``(stats, cpu_time, wall_time)`` — the frozen statistics plus
    the non-negative CPU/wall time elapsed since profiling started, or
    0.0 for both when the profiler was never started.

    Bug fix: ``time.clock`` was removed in Python 3.8, so the original
    ``except AttributeError`` silently returned zeros on modern Pythons;
    fall back to ``time.process_time`` instead.
    """
    clock = getattr(time, 'clock', time.process_time)
    try:
        cpu_time = max(0, clock() - self._cpu_time_started)
        wall_time = max(0, time.time() - self._wall_time_started)
    except AttributeError:
        # never started: the *_started attributes do not exist yet
        cpu_time = wall_time = 0.0
    return self.stats, cpu_time, wall_time
Gets the frozen statistics to serialize by Pickle .
243,293
def dump(self, dump_filename, pickle_protocol=pickle.HIGHEST_PROTOCOL):
    """Pickle ``(profiler class, result tuple)`` to *dump_filename*."""
    payload = (self.__class__, self.result())
    with open(dump_filename, 'wb') as dump_file:
        pickle.dump(payload, dump_file, pickle_protocol)
Saves the profiling result to a file
243,294
def make_viewer(self, title=None, at=None):
    """Build a StatisticsViewer pre-loaded with this profiler's result.

    The viewer is bound to this profiler class, fed the frozen stats and
    cpu/wall timings, activated, and returned.
    """
    viewer = StatisticsViewer()
    viewer.set_profiler_class(self.__class__)
    stats, cpu_time, wall_time = self.result()
    viewer.set_result(stats, cpu_time, wall_time, title=title, at=at)
    viewer.activate()
    return viewer
Makes a statistics viewer from the profiling result .
243,295
def pack_msg(method, msg, pickle_protocol=PICKLE_PROTOCOL):
    """Serialize ``(method, msg)`` into a framed wire packet.

    Layout: the packed method id, then the pickled payload's size, then
    the pickle bytes themselves.
    """
    dump = io.BytesIO()
    pickle.dump(msg, dump, pickle_protocol)
    size = dump.tell()
    return (struct.pack(METHOD_STRUCT_FORMAT, method) +
            struct.pack(SIZE_STRUCT_FORMAT, size) +
            dump.getvalue())
Packs a method and message .
243,296
def recv(sock, size):
    """Receive exactly *size* bytes from *sock*, blocking until done.

    Raises ``socket.error(ECONNRESET)`` when the peer closes the
    connection before *size* bytes arrive.
    """
    data = sock.recv(size, socket.MSG_WAITALL)
    if len(data) == size:
        return data
    raise socket.error(ECONNRESET, 'Connection closed')
Receives exactly size bytes . This function blocks the thread .
243,297
def recv_msg ( sock ) : data = recv ( sock , struct . calcsize ( METHOD_STRUCT_FORMAT ) ) method , = struct . unpack ( METHOD_STRUCT_FORMAT , data ) data = recv ( sock , struct . calcsize ( SIZE_STRUCT_FORMAT ) ) size , = struct . unpack ( SIZE_STRUCT_FORMAT , data ) data = recv ( sock , size ) msg = pickle . loads ( d...
Receives a method and message from the socket . This function blocks the current thread .
243,298
def connected ( self , client ) : self . clients . add ( client ) self . _log_connected ( client ) self . _start_watching ( client ) self . send_msg ( client , WELCOME , ( self . pickle_protocol , __version__ ) , pickle_protocol = 0 ) profiler = self . profiler while True : try : profiler = profiler . profiler except A...
Call this method when a client connected .
243,299
def disconnected(self, client):
    """Unregister a client that has gone away: remove it from the active
    set, log the disconnect, and release its resources.  No-op when the
    client is unknown."""
    if client in self.clients:
        self.clients.remove(client)
        self._log_disconnected(client)
        self._close(client)
Call this method when a client disconnected .