idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
7,900
def refresh ( self , key , ttl , prev_value = None , prev_index = None , timeout = None ) : return self . adapter . set ( key , ttl = ttl , refresh = True , prev_value = prev_value , prev_index = prev_index , timeout = timeout )
Sets only a TTL of a key . The waiters doesn t receive notification by this operation .
66
20
7,901
def create ( self , key , value = None , dir = False , ttl = None , timeout = None ) : return self . adapter . set ( key , value , dir = dir , ttl = ttl , prev_exist = False , timeout = timeout )
Creates a new key .
56
6
7,902
def update ( self , key , value = None , dir = False , ttl = None , refresh = False , prev_value = None , prev_index = None , timeout = None ) : return self . adapter . set ( key , value , dir = dir , ttl = ttl , refresh = refresh , prev_value = prev_value , prev_index = prev_index , prev_exist = True , timeout = timeout )
Updates an existing key .
92
6
7,903
def append ( self , key , value = None , dir = False , ttl = None , timeout = None ) : return self . adapter . append ( key , value , dir = dir , ttl = ttl , timeout = timeout )
Creates a new automatically increasing key in the given directory key .
50
13
7,904
def delete ( self , key , dir = False , recursive = False , prev_value = None , prev_index = None , timeout = None ) : return self . adapter . delete ( key , dir = dir , recursive = recursive , prev_value = prev_value , prev_index = prev_index , timeout = timeout )
Deletes a key .
69
5
7,905
def find ( self , cell_designation , cell_filter = lambda x , c : 'c' in x and x [ 'c' ] == c ) : res = [ i for i , sc in enumerate ( self . spike_containers ) if cell_filter ( sc . meta , cell_designation ) ] if len ( res ) > 0 : return res [ 0 ]
finds spike containers in a multi spike containers collection
82
10
7,906
def len ( self , resolution = 1.0 , units = None , conversion_function = convert_time , end_at_end = True ) : if units is not None : resolution = conversion_function ( resolution , from_units = units , to_units = self . units ) else : units = self . units if self . min is None : return int ( self . max / resolution ) if self . max is None : return 0 if units != '1' and end_at_end : return int ( np . ceil ( ( self . max - self . min ) / resolution ) ) return int ( np . ceil ( ( self . max - self . min ) / resolution ) + 1 )
Calculates the length of the Label Dimension from its minimum maximum and wether it is discrete .
149
20
7,907
def logspace ( self , bins = None , units = None , conversion_function = convert_time , resolution = None , end_at_end = True ) : if type ( bins ) in [ list , np . ndarray ] : return bins min = conversion_function ( self . min , from_units = self . units , to_units = units ) max = conversion_function ( self . max , from_units = self . units , to_units = units ) if units is None : units = self . units if resolution is None : resolution = 1.0 if bins is None : bins = self . len ( resolution = resolution , units = units , conversion_function = conversion_function ) # + 1 if units != '1' and end_at_end : # continuous variable behaviour: # we end with the last valid value at the outer edge return np . logspace ( np . log10 ( min ) , np . log10 ( max ) , bins + 1 ) [ : - 1 ] # discrete variable behaviour: # we end with the last valid value as its own bin return np . logspace ( np . log10 ( min ) , np . log10 ( max ) , bins )
bins overwrites resolution
253
5
7,908
def constraint_range_dict ( self , * args , * * kwargs ) : bins = self . bins ( * args , * * kwargs ) return [ { self . name + '__gte' : a , self . name + '__lt' : b } for a , b in zip ( bins [ : - 1 ] , bins [ 1 : ] ) ] space = self . space ( * args , * * kwargs ) resolution = space [ 1 ] - space [ 0 ] return [ { self . name + '__gte' : s , self . name + '__lt' : s + resolution } for s in space ]
Creates a list of dictionaries which each give a constraint for a certain section of the dimension .
140
20
7,909
def find_labels ( self , key , find_in_name = True , find_in_units = False ) : if type ( key ) is str : found_keys = [ ] if key . startswith ( '~' ) : for label_no , label in enumerate ( self . labels ) : if find_in_name and key [ 1 : ] in label . name : found_keys . append ( label_no ) if find_in_units and key [ 1 : ] in label . units : found_keys . append ( label_no ) else : for label_no , label in enumerate ( self . labels ) : if find_in_name and key == label . name : return [ label_no ] if find_in_units and key == label . units : return [ label_no ] return found_keys if hasattr ( key , '__call__' ) : found_keys = [ ] for label_no , label in enumerate ( self . labels ) : if key ( label ) : found_keys . append ( label_no ) return found_keys if type ( key ) is int : return [ key ] if key < self . matrix . shape [ 1 ] else [ ] return [ key ]
Takes a string or a function to find a set of label indizes that match . If the string starts with a ~ the label only has to contain the string .
265
34
7,910
def convert ( self , label , units = None , conversion_function = convert_time ) : label_no = self . get_label_no ( label ) new_label , new_column = self . get_converted ( label_no , units , conversion_function ) labels = [ LabelDimension ( l ) for l in self . labels ] labels [ label_no ] = new_label matrix = self . matrix . copy ( ) matrix [ : , label_no ] = new_column return LabeledMatrix ( matrix , labels )
converts a dimension in place
115
6
7,911
def _get_constrained_labels ( self , remove_dimensions = False , * * kwargs ) : new_labels = [ ] for label_no , label in enumerate ( self . labels ) : new_label = LabelDimension ( label ) remove = False for k in kwargs : if k == label . name : new_label . max = kwargs [ k ] new_label . min = kwargs [ k ] remove = True if k == label . name + '__lt' : if new_label . units == '1' : new_label . max = np . min ( [ new_label . max , kwargs [ k ] - 1 ] ) # is this right? else : new_label . max = np . min ( [ new_label . max , kwargs [ k ] ] ) #remove = True if k == label . name + '__lte' : new_label . max = np . min ( [ new_label . max , kwargs [ k ] ] ) #remove = True if k == label . name + '__gt' : if new_label . units == '1' : new_label . min = np . max ( [ new_label . min , kwargs [ k ] + 1 ] ) else : new_label . min = np . max ( [ new_label . min , kwargs [ k ] ] ) #remove = True if k == label . name + '__gte' : new_label . min = np . max ( [ new_label . min , kwargs [ k ] ] ) #remove = True if k == label . name + '__evals' : remove = True if remove_dimensions : if remove : # skipping removed labels continue new_labels . append ( new_label ) return new_labels
returns labels which have updated minima and maxima depending on the kwargs supplied to this
400
20
7,912
def store_meta ( self , meta ) : if self . meta is None : self . meta = { } self . meta . update ( meta ) return self
Inplace method that adds meta information to the meta dictionary
33
11
7,913
def find ( self , cell_designation , cell_filter = lambda x , c : 'c' in x and x [ 'c' ] == c ) : if 'parent' in self . meta : return ( self . meta [ 'parent' ] , self . meta [ 'parent' ] . find ( cell_designation , cell_filter = cell_filter ) )
finds spike containers in multi spike containers collection offspring
80
10
7,914
def ISIs ( self , time_dimension = 0 , units = None , min_t = None , max_t = None ) : units = self . _default_units ( units ) converted_dimension , st = self . spike_times . get_converted ( time_dimension , units ) if min_t is None : min_t = converted_dimension . min if max_t is None : max_t = converted_dimension . max return np . diff ( sorted ( st [ ( st > min_t ) * ( st < max_t ) ] ) )
returns the Inter Spike Intervals
122
7
7,915
def temporal_firing_rate ( self , time_dimension = 0 , resolution = 1.0 , units = None , min_t = None , max_t = None , weight_function = None , normalize_time = False , normalize_n = False , start_units_with_0 = True , cell_dimension = 'N' ) : units = self . _default_units ( units ) if self . data_format == 'spike_times' : converted_dimension , st = self . spike_times . get_converted ( 0 , units ) if min_t is None : min_t = converted_dimension . min if max_t is None : max_t = converted_dimension . max st = st [ ( st >= min_t ) * ( st < max_t ) ] bins = converted_dimension . linspace_by_resolution ( resolution , end_at_end = True , extra_bins = 0 ) H , edg = np . histogram ( st , bins = bins ) if normalize_time : H = H / ( convert_time ( resolution , from_units = units , to_units = 's' ) ) # make it Hertz if normalize_n : H = H / ( len ( np . unique ( self . spike_times [ cell_dimension ] ) ) ) return H , edg
Outputs a time histogram of spikes .
294
9
7,916
def plot_temporal_firing_rate ( self , time_dimension = 0 , resolution = 1.0 , units = None , min_t = None , max_t = None , weight_function = None , normalize_time = False , normalize_n = False , start_units_with_0 = True , cell_dimension = 'N' , * * kwargs ) : if bool ( self ) : import matplotlib . pylab as plt H , ed = self . temporal_firing_rate ( time_dimension = time_dimension , resolution = resolution , units = units , min_t = min_t , max_t = max_t , weight_function = weight_function , normalize_time = normalize_time , normalize_n = normalize_n , start_units_with_0 = start_units_with_0 , cell_dimension = cell_dimension ) plt . plot ( ed [ 1 : ] , H , * * kwargs )
Plots a firing rate plot .
219
7
7,917
def get_units ( self , * args , * * kwargs ) : if len ( args ) == 1 : return self . spike_times . get_label ( args [ 0 ] ) . units return [ self . spike_times . get_label ( a ) . units for a in args ]
Returns the units of a Dimension
64
6
7,918
def get_min ( self , * args , * * kwargs ) : if len ( args ) == 1 : return self . spike_times . get_label ( args [ 0 ] ) . min return [ self . spike_times . get_label ( a ) . max for a in args ]
Returns the minimum of a Dimension
64
6
7,919
def get_max ( self , * args , * * kwargs ) : if len ( args ) == 1 : return self . spike_times . get_label ( args [ 0 ] ) . max return [ self . spike_times . get_label ( a ) . max for a in args ]
Returns the maximum of a Dimension
64
6
7,920
def linspace_bins ( self , dim , * args , * * kwargs ) : return self . spike_times . get_label ( dim ) . linspace_bins ( * args , * * kwargs )
Like linspace but shifts the space to create edges for histograms .
52
15
7,921
def create_SpikeGeneratorGroup ( self , time_label = 0 , index_label = 1 , reorder_indices = False , index_offset = True ) : import brian2 spike_times = self . spike_times . convert ( time_label , 's' ) [ time_label ] * brian2 . second indices = [ 0 ] * len ( spike_times ) if len ( self . spike_times . find_labels ( index_label ) ) : indices = self . spike_times [ index_label ] if index_offset is not False : if index_offset is True : indices = indices - self . spike_times . get_label ( index_label ) . min else : indices = indices - index_offset N = np . max ( indices ) else : N = self . spike_times . get_label ( index_label ) . max if reorder_indices : indices_levels = np . sort ( np . unique ( indices ) ) . tolist ( ) indices = np . array ( [ indices_levels . index ( i ) for i in indices ] ) N = len ( indices_levels ) return brian2 . SpikeGeneratorGroup ( N + 1 , indices = indices , times = spike_times )
Creates a brian 2 create_SpikeGeneratorGroup object that contains the spikes in this container .
269
22
7,922
def to_neo ( self , index_label = 'N' , time_label = 0 , name = 'segment of exported spikes' , index = 0 ) : import neo from quantities import s seq = neo . Segment ( name = name , index = index ) t_start = None t_stop = None if self . min_t is not None : t_start = convert_time ( self . min_t , from_units = self . units , to_units = 's' ) * s if self . max_t is not None : t_stop = convert_time ( self . max_t , from_units = self . units , to_units = 's' ) * s for train in self . generate ( index_label ) : seq . spiketrains . append ( neo . SpikeTrain ( train . spike_times . get_converted ( time_label , 's' ) [ 1 ] * s , t_start = t_start , t_stop = t_stop ) ) return seq
Returns a neo Segment containing the spike trains .
222
10
7,923
def qteModificationChanged ( self , mod ) : if mod : s = '*' else : s = '-' self . _qteModeBar . qteChangeModeValue ( 'MODIFIED' , s )
Update the modification status in the mode bar .
47
9
7,924
def loadFile ( self , fileName ) : self . fileName = fileName # Assign QFile object with the current name. self . file = QtCore . QFile ( fileName ) if self . file . exists ( ) : # Load the file into the widget and reset the undo stack # to delete the undo object create by the setText method. # Without it, an undo operation would delete the content # of the widget which is intuitive. self . qteScintilla . setText ( open ( fileName ) . read ( ) ) self . qteScintilla . qteUndoStack . reset ( ) else : msg = "File <b>{}</b> does not exist" . format ( self . qteAppletID ( ) ) self . qteLogger . info ( msg )
Display the file fileName .
174
6
7,925
def conv ( arg , default = None , func = None ) : if func : return func ( arg ) if arg else default else : return arg if arg else default
essentially the generalization of arg if arg else default
34
11
7,926
def dump_pickle ( name , obj ) : with open ( name , "wb" ) as f : pickle . dump ( obj , f , 2 ) pass
quick pickle dump similar to np . save
35
9
7,927
def chunks ( l , n ) : #http://stackoverflow.com/a/3226719 #...not that this is hard to understand. return [ l [ x : x + n ] for x in range ( 0 , len ( l ) , n ) ]
chunk l in n sized bits
57
7
7,928
def check_vprint ( s , vprinter ) : if vprinter is True : print ( s ) elif callable ( vprinter ) : vprinter ( s )
checked verbose printing
40
4
7,929
def filelines ( fname , strip = False ) : with open ( fname , 'r' ) as f : lines = f . readlines ( ) if strip : lines [ : ] = [ line . strip ( ) for line in lines ] return lines
read lines from a file into lines ... optional strip
54
10
7,930
def parse_utuple ( s , urx , length = 2 ) : if type ( urx ) != str : urx = urx . pattern if length is not None and length < 1 : raise ValueError ( "invalid length: {}" . format ( length ) ) if length == 1 : rx = r"^ *\( *{urx} *,? *\) *$" . format ( urx = urx ) elif length is None : rx = r"^ *\( *(?:{urx} *, *)*{urx} *,? *\) *$" . format ( urx = urx ) else : rx = r"^ *\( *(?:{urx} *, *){{{rep1}}}{urx} *,? *\) *$" . format ( rep1 = length - 1 , urx = urx ) return re . match ( rx , s )
parse a string into a list of a uniform type
206
10
7,931
def parse_numtuple ( s , intype , length = 2 , scale = 1 ) : if intype == int : numrx = intrx_s elif intype == float : numrx = fltrx_s else : raise NotImplementedError ( "Not implemented for type: {}" . format ( intype ) ) if parse_utuple ( s , numrx , length = length ) is None : raise ValueError ( "{} is not a valid number tuple." . format ( s ) ) return [ x * scale for x in evalt ( s ) ]
parse a string into a list of numbers of a type
124
11
7,932
def parse_ctuple ( s , length = 2 ) : if parse_utuple ( s , colrx_s , length = length ) is None : raise ValueError ( "{} is not a valid color tuple." . format ( s ) ) #quote strings s = quote_subs ( s , colorfix = True ) return evalt ( s )
parse a string of acceptable colors into matplotlib that is either strings or three tuples of rgb . Don t quote strings .
75
26
7,933
def parse_stuple ( s , length = 2 ) : if parse_utuple ( s , isrx_s , length = length ) is None : raise ValueError ( "{} is not a valid string tuple." . format ( s ) ) s = quote_subs ( s ) return evalt ( s )
parse a string of strings . Don t quote strings
67
10
7,934
def parse_colors ( s , length = 1 ) : if length and length > 1 : return parse_ctuple ( s , length = length ) if re . match ( '^ *{} *$' . format ( isrx_s ) , s ) : #it's just a string. return [ s ] elif re . match ( '^ *{} *$' . format ( rgbrx_s ) , s ) : return [ eval ( s ) ] else : return parse_ctuple ( s , length = length )
helper for parsing a string that can be either a matplotlib color or be a tuple of colors . Returns a tuple of them either way .
116
30
7,935
def parse_qs ( s , rx , parsef = None , length = 2 , quote = False ) : if type ( rx ) != str : rx = rx . pattern if re . match ( " *\(.*\)" , s ) : if not parsef : if parse_utuple ( s , rx , length = length ) : if quote : s = quote_subs ( s ) return evalt ( s ) else : raise ValueError ( "{} did is not a valid tuple of {}" . format ( s , rx ) ) else : return parsef ( s , length = length ) elif re . match ( '^ *{} *$' . format ( rx ) , s ) : if quote : return eval ( '["{}"]' . format ( s ) ) return eval ( '[{}]' . format ( s ) ) else : raise ValueError ( "{} does not match '{}' or the passed parsef" . format ( s , rx ) )
helper for parsing a string that can both rx or parsef which is obstensibly the parsef for rx .
218
25
7,936
def sd ( d , * * kw ) : #HURR SO COMPLICATED r = { } #copy. if you want to modify, r . update ( d ) #use {}.update r . update ( kw ) return r
A hack to return a modified dict dynamically . Basically Does classless OOP as in js but with dicts although not really for the verb parts of OOP but more of the subject stuff .
51
39
7,937
def mk_getkw ( kw , defaults , prefer_passed = False ) : def getkw ( * ls ) : r = [ kw [ l ] if test ( kw , l ) else defaults [ l ] for l in ls ] if len ( r ) == 1 : return r [ 0 ] return r def getkw_prefer_passed ( * ls ) : r = [ kw [ l ] if l in kw else defaults [ l ] for l in ls ] if len ( r ) == 1 : return r [ 0 ] return r return getkw if not prefer_passed else getkw_prefer_passed
a helper for generating a function for reading keywords in interface functions with a dictionary with defaults
138
17
7,938
def _load_resource ( self ) : url = self . _url if self . _params : url += '?{0}' . format ( six . moves . urllib_parse . urlencode ( self . _params ) ) r = getattr ( self . _session , self . _meta . get_method . lower ( ) ) ( url ) if r . status_code == 404 : raise NotFoundException ( 'Server returned 404 Not Found for the URL {0}' . format ( self . _url ) ) elif not 200 <= r . status_code < 400 : raise HTTPException ( 'Server returned {0} ({1})' . format ( r . status_code , r . reason ) , r ) data = self . _meta . deserializer . to_dict ( r . text ) self . populate_field_values ( data )
Load resource data from server
186
5
7,939
def populate_field_values ( self , data ) : if not self . _meta . case_sensitive_fields : data = { k . lower ( ) : v for k , v in six . iteritems ( data ) } if self . _meta . match_fuzzy_keys : # String any non-alphanumeric chars from each key data = { '' . join ( x for x in k if x in ALPHANUMERIC ) . lower ( ) : v for k , v in six . iteritems ( data ) } for field in self . _meta . fields : name = field . name if self . _meta . case_sensitive_fields else field . name . lower ( ) value = None if self . _meta . match_fuzzy_keys : name = '' . join ( x for x in name if x in ALPHANUMERIC ) . lower ( ) if name in data : value = field . to_python ( data [ name ] , self ) elif field . required and field . default is None : message = "Response from {0} is missing required field '{1}'" . format ( self . _url , field . name ) if self . _strict : raise MissingFieldException ( message ) else : logger . warn ( message ) elif field . default is not None : value = copy . copy ( field . default ) setattr ( self , field . _attr_name , value ) self . _populated_field_values = True
Load resource data and populate field values
318
7
7,940
def close_thread ( self ) : if self . __thread is not None and self . __thread . is_alive ( ) is True : raise WThreadJoiningTimeoutError ( 'Thread is still alive. Thread name: %s' % self . __thread . name ) self . start_event ( ) . clear ( ) self . __thread = None
Clear all object descriptors for stopped task . Task must be joined prior to calling this method .
76
19
7,941
def trace_module ( no_print = True ) : pwd = os . path . dirname ( __file__ ) script_name = os . path . join ( pwd , "test_my_module.py" ) with pexdoc . ExDocCxt ( ) as exdoc_obj : if pytest . main ( [ "-s" , "-vv" , "-x" , "{0}" . format ( script_name ) ] ) : raise RuntimeError ( "Tracing did not complete successfully" ) if not no_print : module_prefix = "docs.support.my_module." callable_names = [ "func" , "MyClass.value" ] for callable_name in callable_names : callable_name = module_prefix + callable_name print ( "\nCallable: {0}" . format ( callable_name ) ) print ( exdoc_obj . get_sphinx_doc ( callable_name , width = 70 ) ) print ( "\n" ) return copy . copy ( exdoc_obj )
Trace my_module exceptions .
231
7
7,942
def header_name_check ( header_name ) : header_match = WHTTPHeaders . header_name_re . match ( header_name . encode ( 'us-ascii' ) ) return len ( header_name ) > 0 and header_match is not None
Check header name for validity . Return True if name is valid
60
12
7,943
def remove_headers ( self , header_name ) : if self . __ro_flag : raise RuntimeError ( 'ro' ) header_name = self . normalize_name ( header_name ) if header_name in self . __headers . keys ( ) : self . __headers . pop ( header_name )
Remove header by its name
68
5
7,944
def add_headers ( self , header_name , value , * values ) : if self . __ro_flag : raise RuntimeError ( 'ro' ) header_name = self . normalize_name ( header_name ) if header_name not in self . __headers . keys ( ) : self . __headers [ header_name ] = [ value ] else : self . __headers [ header_name ] . append ( value ) for single_value in values : self . __headers [ header_name ] . append ( single_value )
Add new header
115
3
7,945
def get_headers ( self , header_name ) : header_name = self . normalize_name ( header_name ) if header_name in self . __headers . keys ( ) : return tuple ( self . __headers [ header_name ] )
Return header value by its name
54
6
7,946
def switch_name_style ( self , http_protocol_version ) : new_headers = WHTTPHeaders ( ) new_headers . __normalization_mode = http_protocol_version names = self . headers ( ) for name in names : new_headers . add_headers ( name , * self . get_headers ( name ) ) for cookie_name in self . __set_cookies . cookies ( ) : new_headers . __set_cookies . add_cookie ( self . __set_cookies [ cookie_name ] . copy ( ) ) return new_headers
Return object copy with header names saved as it is described in the given protocol version
127
16
7,947
def ro ( self ) : ro_headers = WHTTPHeaders ( ) names = self . headers ( ) for name in names : ro_headers . add_headers ( name , * self . get_headers ( name ) ) ro_headers . __cookies = self . __set_cookies . ro ( ) ro_headers . __ro_flag = True return ro_headers
Return read - only copy of this object
81
8
7,948
def client_cookie_jar ( self ) : cookie_jar = WHTTPCookieJar ( ) cookie_header = self . get_headers ( 'Cookie' ) for cookie_string in ( cookie_header if cookie_header is not None else tuple ( ) ) : for single_cookie in WHTTPCookieJar . import_header_text ( cookie_string ) : cookie_jar . add_cookie ( single_cookie ) return cookie_jar . ro ( )
Return internal cookie jar that must be used as HTTP - request cookies
102
13
7,949
def import_headers ( cls , http_code ) : headers = WHTTPHeaders ( ) message = email . message_from_file ( StringIO ( http_code ) ) for header_name , header_value in message . items ( ) : headers . add_headers ( header_name , header_value ) cookie_header = headers . get_headers ( 'Set-Cookie' ) if cookie_header is not None : for cookie_string in cookie_header : for single_cookie in WHTTPCookieJar . import_header_text ( cookie_string ) : headers . set_cookie_jar ( ) . add_cookie ( single_cookie ) headers . remove_headers ( 'Set-Cookie' ) return headers
Create WHTTPHeaders by the given code . If code has Set - Cookie headers that headers are parsed data are stored in internal cookie jar . At the end of parsing Set - Cookie headers are removed from the result
158
43
7,950
def trace_module ( no_print = True ) : with pexdoc . ExDocCxt ( ) as exdoc_obj : try : docs . support . my_module . func ( "John" ) obj = docs . support . my_module . MyClass ( ) obj . value = 5 obj . value except : raise RuntimeError ( "Tracing did not complete successfully" ) if not no_print : module_prefix = "docs.support.my_module." callable_names = [ "func" , "MyClass.value" ] for callable_name in callable_names : callable_name = module_prefix + callable_name print ( "\nCallable: {0}" . format ( callable_name ) ) print ( exdoc_obj . get_sphinx_doc ( callable_name , width = 70 ) ) print ( "\n" ) return copy . copy ( exdoc_obj )
Trace my_module_original exceptions .
202
9
7,951
def ro ( self ) : request = WWebRequest ( self . session ( ) , self . method ( ) , self . path ( ) , headers = self . headers ( ) . ro ( ) , request_data = self . request_data ( ) ) request . __ro_flag = True return request
Create read - only copy
64
5
7,952
def simple_contact ( request , username = "" ) : site = Site . objects . get_current ( ) form = ContactForm ( request . POST or None ) UserModel = get_user_model ( ) recipients = [ ] site_form = False logger . debug ( 'Recipients should be empty: %s' % recipients ) # if we know, fill in the user name and email if request . user . is_authenticated : # first, resolve username for tango and non-tango sites try : name = request . user . display_name except AttributeError : name = request . user . username form . fields [ 'sender_name' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_name' ] . initial = name form . fields [ 'sender_email' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_email' ] . initial = request . user . email if username : member = get_object_or_404 ( UserModel , username = username ) recipients = [ member . email , ] logger . debug ( 'Recipients should be a single user: %s' % recipients ) else : # site contact form. # Use first of settings.DEFAULT_CONTACTS or all superusers site_form = True member = None recipients = getattr ( settings , "DEFAULT_CONTACTS" , None ) logger . debug ( 'Recipients should be match DEFAULT_CONTACTS: %s' % recipients ) if not recipients : recipients = UserModel . objects . filter ( is_superuser = True ) . values_list ( 'email' , flat = True ) warnings . warn ( "settings.DEFAULT_CONTACTS does not exist. You may want to create it." , RuntimeWarning ) logger . debug ( 'Recipients should be superusers: %s' % recipients ) if form . is_valid ( ) : if site_form : subject = "A {} contact form submission from {}" . format ( site . name , form . cleaned_data [ 'sender_name' ] ) else : subject = "A message from {} on {}" . format ( form . cleaned_data [ 'sender_name' ] , site . name ) body = form . cleaned_data [ 'body' ] sender_email = form . cleaned_data [ 'sender_email' ] if 'send_a_copy' in request . POST : recipients . append ( sender_email ) logger . 
debug ( 'Recipients should be match prior + sender email: %s' % recipients ) mail = EmailMessage ( subject = subject , body = body , from_email = sender_email , to = recipients ) mail . send ( ) return HttpResponseRedirect ( success_url ) return render ( request , 'contact/simple_form.html' , { 'form' : form , 'site' : site , 'member' : member } )
Defines simple contact form that can be used to contact a site member passed by username in the URL or to all superusers or to a list defined in settings . DEFAULT_CONTACTS .
644
40
7,953
def build_contact ( request , slug = "" ) : controller = get_object_or_404 ( ContactFormController , slug = slug ) site = Site . objects . get_current ( ) UserModel = get_user_model ( ) user = request . user form = ContactForm ( request . POST or None , request . FILES or None , controller = controller ) # if we know, fill in the user name and email if user . is_authenticated : # first, resolve username for tango and non-tango sites try : name = user . display_name except AttributeError : name = user . username form . fields [ 'sender_name' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_name' ] . initial = name form . fields [ 'sender_email' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_email' ] . initial = user . email if form . is_valid ( ) : if controller . store_in_db : # To do: sanitize submission. new_msg = Contact ( * * form . cleaned_data ) new_msg . controller = controller new_msg . site = site if controller . override_subject : # we're overriding the subject new_msg . subject = controller . override_subject new_msg . save ( ) if controller . send_emails : form_data = form . cleaned_data if controller . override_subject : subject = controller . override_subject elif 'subject' in form_data : subject = form_data [ 'subject' ] else : subject = "{} message from {}" . format ( controller . name , form_data [ 'sender_name' ] ) body = "{} \n\n {}" . format ( form_data [ 'body' ] , form_data [ 'sender_name' ] ) if controller . request_contact_info : body += "\nAddress: {} \nCity: {} \nState: {} \nPhone: {}" . format ( form_data [ 'contact_address' ] , form_data [ 'contact_city' ] , form_data [ 'contact_state' ] , form_data [ 'contact_phone' ] ) if controller . email_options == '2' : # Create selectable list from recipients try : to = [ UserModel . objects . get ( username = form . cleaned_data [ 'to' ] ) . email ] except Exception : to = [ form . cleaned_data [ 'to' ] ] if controller . email_options == '1' : to = [ r . 
email for r in controller . recipients . all ( ) ] for r in controller . other_recipients . all ( ) : to . append ( r . email ) if 'send_a_copy' in form . cleaned_data : to . append ( form . cleaned_data [ 'sender_email' ] ) mail = EmailMessage ( subject = subject , body = body , from_email = form . cleaned_data [ 'sender_email' ] , to = to ) if 'photo' in request . FILES : photo = request . FILES [ 'photo' ] mail . attach ( photo . name , photo . read ( ) , photo . content_type ) mail . send ( ) return render ( request , 'success_url' , { 'controller' : controller } ) return render ( request , 'contact/form.html' , { 'form' : form , 'site' : site , 'controller' : controller } )
Builds appropriate contact form based on options set in the contact_form controller .
774
16
7,954
def pale_webapp2_request_handler_generator ( pale_endpoint ) : def pale_handler ( self , * args , * * kwargs ) : if self . request . method == "OPTIONS" : origin = self . request . headers . get ( "Origin" , None ) self . response . headers [ 'Access-Control-Allow-Origin' ] = origin self . response . headers [ 'Access-Control-Allow-Headers' ] = 'Origin, X-Requested-With, Content-Type, Accept' self . response . headers [ 'Access-Control-Allow-Methods' ] = 'POST, GET, PUT, DELETE' self . response . headers [ 'Access-Control-Allow-Credentials' ] = 'true' return self . response try : return pale_endpoint . _execute ( self . request ) finally : pale_endpoint . _finally ( ) cls = type ( pale_endpoint . _route_name , ( webapp2 . RequestHandler , ) , dict ( pale_handler = pale_handler ) ) return cls
Generate a webapp2 . RequestHandler class for the pale endpoint .
239
15
7,955
def bind_pale_to_webapp2 ( pale_app_module , webapp_wsgiapplication , route_prefix = None ) : if not isinstance ( webapp_wsgiapplication , webapp2 . WSGIApplication ) : raise TypeError ( "pale.adapters.webapp2.bind_pale_to_webapp2 expected " "the passed in webapp_wsgiapplication to be an instance of " "WSGIApplication, but it was an instance of %s instead." % ( type ( webapp_wsgiapplication ) , ) ) if not pale . is_pale_module ( pale_app_module ) : raise TypeError ( "pale.adapters.webapp2.bind_pale_to_webapp2 expected " "the passed in pale_app_module to be a Python module with a " "`_module_type` value equal to `pale.ImplementationModule`, " "but it found an instance of %s instead." % ( type ( pale_app_module ) , ) ) endpoints = pale . extract_endpoints ( pale_app_module ) for endpoint in endpoints : endpoint . _set_response_class ( RESPONSE_CLASS ) method = endpoint . _http_method name = endpoint . _route_name req_handler = pale_webapp2_request_handler_generator ( endpoint ) route_uri = endpoint . _uri if route_prefix is not None : route_uri = "%s%s" % ( route_prefix , route_uri ) route = webapp2 . Route ( route_uri , handler = req_handler , name = name , handler_method = 'pale_handler' , methods = [ method , "OPTIONS" ] ) webapp_wsgiapplication . router . add ( route )
Binds a Pale API implementation to a webapp2 WSGIApplication
409
16
7,956
def encode ( self , envelope , session , target = None , modification_code = None , * * kwargs ) : self . __args_check ( envelope , target , modification_code ) if isinstance ( envelope , WMessengerTextEnvelope ) : target_envelope_cls = WMessengerTextEnvelope else : # isinstance(envelope, WMessengerBytesEnvelope) target_envelope_cls = WMessengerBytesEnvelope if target == WMessengerFixedModificationLayer . Target . head : return target_envelope_cls ( modification_code + envelope . message ( ) , meta = envelope ) else : # target == WMessengerFixedModificationLayer.Target.tail return target_envelope_cls ( envelope . message ( ) + modification_code , meta = envelope )
Method appends modification_code to the specified envelope .
185
11
7,957
def decode ( self , envelope , session , target = None , modification_code = None , * * kwargs ) : self . __args_check ( envelope , target , modification_code ) message = envelope . message ( ) if len ( message ) < len ( modification_code ) : raise ValueError ( 'Invalid message length' ) if isinstance ( envelope , WMessengerTextEnvelope ) : target_envelope_cls = WMessengerTextEnvelope else : # isinstance(envelope, WMessengerBytesEnvelope) target_envelope_cls = WMessengerBytesEnvelope if target == WMessengerFixedModificationLayer . Target . head : if message [ : len ( modification_code ) ] != modification_code : raise ValueError ( 'Invalid header in message' ) return target_envelope_cls ( message [ len ( modification_code ) : ] , meta = envelope ) else : # target == WMessengerFixedModificationLayer.Target.tail if message [ - len ( modification_code ) : ] != modification_code : raise ValueError ( 'Invalid tail in message' ) return target_envelope_cls ( message [ : - len ( modification_code ) ] , meta = envelope )
Method checks the envelope for the presence of modification_code and removes it .
273
12
7,958
def started_tasks ( self , task_registry_id = None , task_cls = None ) : if task_registry_id is not None : task = None for registered_task in self . __started : if registered_task . __registry_tag__ == task_registry_id : task = registered_task if task_cls is not None and task is not None : if isinstance ( task , task_cls ) is True : return task return None return task result = filter ( lambda x : x is not None , self . __started ) if task_cls is not None : result = filter ( lambda x : isinstance ( x , task_cls ) , result ) return tuple ( result )
Return tasks that were started . The result may be filtered by the given arguments .
158
15
7,959
def stop_task(self, task_tag, stop_dependent=True, stop_requirements=False):
    """Stop the task registered under ``task_tag``; no-op if not started.

    ``stop_dependent`` also stops tasks that depend on this one (depth
    first); ``stop_requirements`` additionally stops this task's own
    requirements in dependency-respecting priority order.
    """
    # TODO: "coverage" requires more tests
    task = self.started_tasks(task_registry_id=task_tag)
    if task is None:
        return

    def stop(task_to_stop):
        # Stop a single task (if it supports stopping) and forget it.
        if task_to_stop in self.__started:
            if isinstance(task_to_stop, WStoppableTask) is True:
                task_to_stop.stop()
            self.__started.remove(task_to_stop)

    def stop_dependency(task_to_stop):
        # Depth-first: stop everything that depends on task_to_stop, then it.
        deeper_dependencies = []
        for dependent_task in self.__started:
            if task_to_stop.__registry_tag__ in dependent_task.__class__.__dependency__:
                deeper_dependencies.append(dependent_task)
        for dependent_task in deeper_dependencies:
            stop_dependency(dependent_task)
        stop(task_to_stop)

    def calculate_requirements(task_to_stop, cross_requirements=False):
        # Requirements of task_to_stop among started tasks. When
        # cross_requirements is False, drop requirements that are themselves
        # required by another requirement (keep only the "outermost" ones).
        requirements = set()
        for dependent_task in self.__started:
            if dependent_task.__class__.__registry_tag__ in task_to_stop.__class__.__dependency__:
                requirements.add(dependent_task)
        if cross_requirements is True:
            return requirements
        result = set()
        for task_a in requirements:
            requirement_match = False
            for task_b in requirements:
                if task_a.__class__.__registry_tag__ in task_b.__class__.__dependency__:
                    requirement_match = True
                    break
            if requirement_match is False:
                result.add(task_a)
        return result

    def calculate_priorities(task_to_stop, *extra_tasks, current_result=None, requirements_left=None):
        # Build a list of "waves": each wave may be stopped before the next.
        if current_result is None:
            current_result = []
        tasks_to_stop = [task_to_stop]
        if len(extra_tasks) > 0:
            tasks_to_stop.extend(extra_tasks)
        current_result.append(list(tasks_to_stop))
        all_requirements = calculate_requirements(tasks_to_stop[0], cross_requirements=True)
        nested_requirements = calculate_requirements(tasks_to_stop[0])
        for dependent_task in tasks_to_stop[1:]:
            nested_requirements = nested_requirements.union(calculate_requirements(dependent_task))
            all_requirements.update(calculate_requirements(dependent_task, cross_requirements=True))
        all_requirements = all_requirements.difference(nested_requirements)
        if requirements_left is not None:
            # Carry over requirements deferred in earlier recursion levels.
            requirements_left = requirements_left.difference(all_requirements)
            nested_requirements.update(requirements_left)
        if len(nested_requirements) == 0:
            return current_result
        return calculate_priorities(
            *list(nested_requirements), current_result=current_result,
            requirements_left=all_requirements)

    if stop_dependent is True:
        stop_dependency(task)
    if stop_requirements is True:
        for task_list in calculate_priorities(task):
            for single_task in task_list:
                stop(single_task)
    if stop_dependent is not True:
        # check if we've already stopped this task
        stop(task)
Stop task with the given task tag . If task already stopped then nothing happens .
753
16
7,960
def start_task ( cls , task_tag , skip_unresolved = False ) : registry = cls . registry_storage ( ) registry . start_task ( task_tag , skip_unresolved = skip_unresolved )
Start task from registry
53
4
7,961
def stop_task ( cls , task_tag , stop_dependent = True , stop_requirements = False ) : registry = cls . registry_storage ( ) registry . stop_task ( task_tag , stop_dependent = stop_dependent , stop_requirements = stop_requirements )
Stop started task from registry
64
5
7,962
def snip_this ( tag = "" , write_date = True ) : snip ( tag = tag , start = - 1 , write_date = write_date )
When this function is invoked in a notebook cell the cell is snipped .
37
17
7,963
def unsnip ( tag = None , start = - 1 ) : import IPython i = IPython . get_ipython ( ) if tag in _tagged_inputs . keys ( ) : if len ( _tagged_inputs [ tag ] ) > 0 : i . set_next_input ( _tagged_inputs [ tag ] [ start ] ) else : if len ( _last_inputs ) > 0 : i . set_next_input ( _last_inputs [ start ] )
This function retrieves a tagged or untagged snippet .
111
11
7,964
def alert ( msg , body = "" , icon = None ) : if type ( body ) == str : body = body [ : 200 ] if call ( [ "which" , "notify-send" ] ) == 0 : if icon is not None : call ( [ "notify-send" , msg , "-i" , icon , body ] ) else : call ( [ "notify-send" , msg , body ] ) else : print ( ( "ALERT: " , msg ) )
alerts the user of something happening via notify - send . If it is not installed the alert will be printed to the console .
106
26
7,965
def recgen ( gen , fix_type_errors = True ) : if not hasattr ( gen , '__iter__' ) : yield gen else : try : for i in gen : for ii in recgen ( i ) : yield ii except TypeError : # oops, it seems it was not an iterable even if it had an __iter__ method... # this happens eg. with theano tensor variables as they try to trick you to sum them. if not fix_type_errors : raise # maybe you want this Exception? yield gen
Iterates through generators recursively and flattens them .
116
12
7,966
def list_of_dicts_to_dict_of_lists ( list_of_dictionaries ) : result = { } all_keys = set ( [ k for d in list_of_dictionaries for k in d . keys ( ) ] ) for d in list_of_dictionaries : for k in all_keys : result . setdefault ( k , [ ] ) . append ( d . get ( k , None ) ) return result
Takes a list of dictionaries and creates a dictionary with the combined values for each key in each dictionary . Missing values are set to None for each dictionary that does not contain a key that is present in at least one other dictionary .
99
56
7,967
def dict_of_lists_to_list_of_dicts ( dictionary_of_lists ) : return [ { key : dictionary_of_lists [ key ] [ index ] if len ( dictionary_of_lists [ key ] ) > index else None for key in dictionary_of_lists . keys ( ) } for index in range ( max ( map ( len , dictionary_of_lists . values ( ) ) ) ) ]
Takes a dictionary of lists and creates a list of dictionaries . If the lists are of unequal length the remaining entries are set to None .
92
29
7,968
def colorate ( sequence , colormap = "" , start = 0 , length = None ) : n = start colors = color_space ( colormap , sequence , start = 0.1 , stop = 0.9 , length = length ) for elem in sequence : yield n , colors [ n - start ] , elem n += 1
like enumerate but with colors
73
6
7,969
def generate ( self , * * kwargs ) : import collections all_params = cartesian_dicts ( { k : kwargs [ k ] for k in kwargs . keys ( ) if isinstance ( kwargs [ k ] , collections . Iterable ) } ) for pi , p in enumerate ( all_params ) : if self . name_mode == 'int' : n = str ( len ( self . containers ) ) else : n = None self . containers . append ( PDContainer ( name = n , params = p , parent = self ) ) self . parameters . update ( { k : kwargs [ k ] for k in kwargs . keys ( ) if not isinstance ( kwargs [ k ] , collections . Iterable ) } ) self . save ( )
run once to create all children containers for each combination of the keywords
172
13
7,970
def param ( self , key , default = None ) : if key in self . parameters : return self . parameters [ key ] return default
for accessing global parameters
28
4
7,971
def generator ( self , gen , * args , * * kwargs ) : with self ( * args , * * kwargs ) : for i in gen : yield i
Use this function to enter and exit the context at the beginning and end of a generator .
37
18
7,972
def validate_url ( self , original_string ) : # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse . urlparse ( original_string ) try : if self . path_only : assert not any ( [ pieces . scheme , pieces . netloc ] ) assert pieces . path else : assert all ( [ pieces . scheme , pieces . netloc ] ) valid_chars = set ( string . letters + string . digits + ":-_." ) assert set ( pieces . netloc ) <= valid_chars assert pieces . scheme in [ 'http' , 'https' ] except AssertionError as e : raise ArgumentError ( self . item_name , "The input you've provided is not a valid URL." ) return pieces
Returns the original string if it was valid ; raises an argument error if it is not .
209
17
7,973
def get_chapter ( self , book_name , book_chapter , cache_chapter = True ) : try : logging . debug ( "Attempting to read chapter from disk" ) verses_list = self . _get_ondisk_chapter ( book_name , book_chapter ) except Exception as e : logging . debug ( "Could not read file from disk. Attempting the internet.." ) logging . debug ( e . message ) verses_list = self . _get_online_chapter ( book_name , book_chapter , cache_chapter = cache_chapter ) return verses_list
Returns a chapter of the bible , first checking to see if that chapter is on disk . If not , then it attempts to fetch it from the internet .
125
29
7,974
def verse_lookup ( self , book_name , book_chapter , verse , cache_chapter = True ) : verses_list = self . get_chapter ( book_name , str ( book_chapter ) , cache_chapter = cache_chapter ) return verses_list [ int ( verse ) - 1 ]
Looks up a verse from online . recoveryversion . bible then returns it .
66
15
7,975
def validate_on_submit ( self ) : # validate form valid = FlaskWtf . validate_on_submit ( self ) # return in case no schema or not submitted if not self . _schema or not self . is_submitted ( ) : return valid # validate data with schema if got one and form was submitted data = dict ( ) for field in self . _fields : data [ field ] = self . _fields [ field ] . data result = self . schema . process ( data , context = self . _force_context ) self . set_errors ( result ) # set filtered data back to form for field in data : self . _fields [ field ] . data = data [ field ] return valid and not bool ( self . errors )
Extend validate on submit to allow validation with schema
158
10
7,976
def set_errors ( self , result ) : # todo: use wtf locale errors = result . get_messages ( ) for property_name in errors : if not hasattr ( self , property_name ) : continue # ignore errors for missing fields prop_errors = errors [ property_name ] if type ( prop_errors ) is not list : prop_errors = [ '<Nested schema result following...>' ] if property_name in self . errors : self . errors [ property_name ] . extend ( prop_errors ) else : self . errors [ property_name ] = prop_errors
Populate field errors with errors from schema validation
129
9
7,977
def diffs2persistence(rev_docs, window_size=50, revert_radius=15,
                      sunset=None, verbose=False):
    """Add a 'persistence' field to each revision doc in a sorted,
    page-partitioned stream of revision documents.

    Persistence statistics describe how tokens added by a revision survive
    through the following ``window_size`` revisions (or until ``sunset``,
    defaulting to now, for the trailing revisions of each page). Yields the
    augmented docs, one page at a time.
    """
    rev_docs = mwxml.utilities.normalize(rev_docs)
    window_size = int(window_size)
    revert_radius = int(revert_radius)
    sunset = Timestamp(sunset) if sunset is not None else Timestamp(time.time())
    # Group the docs by page
    page_docs = groupby(rev_docs, key=lambda d: d['page']['title'])
    for page_title, rev_docs in page_docs:
        if verbose:
            sys.stderr.write(page_title + ": ")
        # We need a look-ahead to know how long this revision was visible
        rev_docs = peekable(rev_docs)
        # The window allows us to manage memory
        window = deque(maxlen=window_size)
        # The state does the actual processing work
        state = DiffState(revert_radius=revert_radius)
        while rev_docs:
            rev_doc = next(rev_docs)
            next_doc = rev_docs.peek(None)
            if next_doc is not None:
                # Visible until the next revision replaces this one.
                seconds_visible = Timestamp(next_doc['timestamp']) - \
                    Timestamp(rev_doc['timestamp'])
            else:
                # Last revision of the page: visible until sunset.
                seconds_visible = sunset - Timestamp(rev_doc['timestamp'])
            if seconds_visible < 0:
                logger.warn("Seconds visible {0} is less than zero."
                            .format(seconds_visible))
                seconds_visible = 0
            _, tokens_added, _ = state.update_opdocs(
                rev_doc['sha1'], rev_doc['diff']['ops'],
                (rev_doc['user'], seconds_visible))
            if len(window) == window_size:
                # Time to start writing some stats
                old_doc, old_added = window[0]
                window.append((rev_doc, tokens_added))
                persistence = token_persistence(old_doc, old_added, window, None)
                old_doc['persistence'] = persistence
                yield old_doc
                if verbose:
                    sys.stderr.write(".")
                    sys.stderr.flush()
            else:
                window.append((rev_doc, tokens_added))
        # Drain the remaining window entries; these use sunset as the cutoff.
        while len(window) > 0:
            old_doc, old_added = window.popleft()
            persistence = token_persistence(old_doc, old_added, window, sunset)
            old_doc['persistence'] = persistence
            yield old_doc
            if verbose:
                sys.stderr.write("_")
                sys.stderr.flush()
        if verbose:
            sys.stderr.write("\n")
Processes a sorted and page - partitioned sequence of revision documents and adds a persistence field to them containing statistics about how each token added in the revision persisted through future revisions .
617
36
7,978
def generator ( name ) : name = name . upper ( ) if name not in WHash . __hash_map__ . keys ( ) : raise ValueError ( 'Hash generator "%s" not available' % name ) return WHash . __hash_map__ [ name ]
Return generator by its name
58
5
7,979
def generator_by_digest ( family , digest_size ) : for generator_name in WHash . available_generators ( family = family ) : generator = WHash . generator ( generator_name ) if generator . generator_digest_size ( ) == digest_size : return generator raise ValueError ( 'Hash generator is not available' )
Return generator by hash generator family name and digest size
74
10
7,980
def sequence ( cls , * info ) : if len ( info ) == 0 : return info = list ( info ) info . reverse ( ) result = WMessengerOnionSessionFlowProto . Iterator ( info [ 0 ] . layer_name ( ) , * * info [ 0 ] . layer_args ( ) ) for i in range ( 1 , len ( info ) ) : result = WMessengerOnionSessionFlowProto . Iterator ( info [ i ] . layer_name ( ) , next_iterator = result , * * info [ i ] . layer_args ( ) ) return result
Useful method to generate an iterator . It is generated by chaining the given info . If no info is specified then None is returned
129
26
7,981
def from_string ( address ) : str_address = None if WMACAddress . re_dash_format . match ( address ) : str_address = "" . join ( address . split ( "-" ) ) elif WMACAddress . re_colon_format . match ( address ) : str_address = "" . join ( address . split ( ":" ) ) elif WMACAddress . re_cisco_format . match ( address ) : str_address = "" . join ( address . split ( "." ) ) elif WMACAddress . re_spaceless_format . match ( address ) : str_address = address if str_address is None : raise ValueError ( "Invalid MAC address format: " + address ) result = WMACAddress ( ) for octet_index in range ( WMACAddress . octet_count ) : octet = str_address [ : 2 ] result . __address [ octet_index ] = int ( octet , 16 ) str_address = str_address [ 2 : ] return result
Return new object by the given MAC - address
224
9
7,982
def from_string ( address ) : address = address . split ( '.' ) if len ( address ) != WIPV4Address . octet_count : raise ValueError ( 'Invalid ip address: %s' % address ) result = WIPV4Address ( ) for i in range ( WIPV4Address . octet_count ) : result . __address [ i ] = WBinArray ( int ( address [ i ] ) , WFixedSizeByteArray . byte_size ) return result
Parse string for IPv4 address
107
7
7,983
def to_string ( address , dns_format = False ) : if isinstance ( address , WIPV4Address ) is False : raise TypeError ( 'Invalid address type' ) address = [ str ( int ( x ) ) for x in address . __address ] if dns_format is False : return '.' . join ( address ) address . reverse ( ) return ( '.' . join ( address ) + '.in-addr.arpa' )
Convert address to string
98
5
7,984
def first_address ( self , skip_network_address = True ) : bin_address = self . __address . bin_address ( ) bin_address_length = len ( bin_address ) if self . __mask > ( bin_address_length - 2 ) : skip_network_address = False for i in range ( bin_address_length - self . __mask ) : bin_address [ self . __mask + i ] = 0 if skip_network_address : bin_address [ bin_address_length - 1 ] = 1 return WIPV4Address ( bin_address )
Return the first IP address of this network
127
8
7,985
def last_address ( self , skip_broadcast_address = True ) : bin_address = self . __address . bin_address ( ) bin_address_length = len ( bin_address ) if self . __mask > ( bin_address_length - 2 ) : skip_broadcast_address = False for i in range ( bin_address_length - self . __mask ) : bin_address [ self . __mask + i ] = 1 if skip_broadcast_address : bin_address [ bin_address_length - 1 ] = 0 return WIPV4Address ( bin_address )
Return the last IP address of this network
130
8
7,986
def iterator ( self , skip_network_address = True , skip_broadcast_address = True ) : return WNetworkIPV4Iterator ( self , skip_network_address , skip_broadcast_address )
Return iterator that can iterate over network addresses
46
9
7,987
def from_string ( address ) : if len ( address ) == 0 : return WFQDN ( ) if address [ - 1 ] == '.' : address = address [ : - 1 ] if len ( address ) > WFQDN . maximum_fqdn_length : raise ValueError ( 'Invalid address' ) result = WFQDN ( ) for label in address . split ( '.' ) : if isinstance ( label , str ) and WFQDN . re_label . match ( label ) : result . _labels . append ( label ) else : raise ValueError ( 'Invalid address' ) return result
Convert doted - written FQDN address to WFQDN object
134
16
7,988
def to_string ( address , leading_dot = False ) : if isinstance ( address , WFQDN ) is False : raise TypeError ( 'Invalid type for FQDN address' ) result = '.' . join ( address . _labels ) return result if leading_dot is False else ( result + '.' )
Return doted - written address by the given WFQDN object
70
14
7,989
def qteReparent ( self , parent ) : # Set the new parent. self . setParent ( parent ) # If this parent has a Qtmacs structure then query it for the # parent window, otherwise set the parent to None. try : self . _qteAdmin . parentWindow = parent . qteParentWindow ( ) except AttributeError : self . _qteAdmin . parentWindow = None # Sanity check: if parent : msg = 'Parent is neither None, nor does it have a' msg += 'qteParentWindow field --> bug' print ( msg )
Re - parent the applet .
123
7
7,990
def qteAddWidget(self, widgetObj: QtGui.QWidget, isFocusable: bool=True,
                 widgetSignature: str=None, autoBind: bool=True):
    """Augment ``widgetObj`` with Qtmacs admin fields and register it
    with this applet, optionally installing default key-bindings.

    Returns the (augmented) widget, or None when a bindings module fails
    to import or lacks the expected hook.
    """
    # Add a Qtmacs data structure to the widget to allow their
    # event administration. Note that, in all likelihood, the
    # widget is an arbitrary Qt widget (eg. QLineEdit,
    # QPushButton, etc).
    widgetObj._qteAdmin = QtmacsAdminStructure(self, isFocusable=isFocusable)
    widgetObj._qteAdmin.appletID = self._qteAdmin.appletID
    # Specify that this widget is not a QtmacsApplet.
    widgetObj._qteAdmin.isQtmacsApplet = False
    # Remember the signature of the applet containing this widget.
    widgetObj._qteAdmin.appletSignature = self.qteAppletSignature()
    # Set the widget signature. If none was specified, use the
    # class name (eg. QLineEdit).
    if widgetSignature is None:
        widgetObj._qteAdmin.widgetSignature = widgetObj.__class__.__name__
    else:
        widgetObj._qteAdmin.widgetSignature = widgetSignature
    # For convenience, as it is otherwise difficult for the macro
    # programmer to determine the widget signature used by Qtmacs.
    # Note: the "wo" is only a shorthand to avoid too long lines.
    wo = widgetObj
    wo.qteSignature = wo._qteAdmin.widgetSignature
    wo.qteSetKeyFilterPolicy = wo._qteAdmin.qteSetKeyFilterPolicy
    del wo
    # Add the widget to the widgetList of this QtmacsApplet.
    # Important: this MUST happen before macros and key-bindings are loaded
    # and bound automatically (see code below) because the method to
    # bind the keys will verify that the widget exists in ``widgetList``.
    self._qteAdmin.widgetList.append(widgetObj)
    # If a widget has a default key-bindings file then the global
    # dictionary ``default_widget_keybindings`` will contain its
    # name.
    default_bind = qte_global.default_widget_keybindings
    if autoBind and (widgetObj.qteSignature in default_bind):
        # Shorthand.
        module_name = default_bind[widgetObj.qteSignature]
        # Import the module with the default key-bindings for the
        # current widget type.
        try:
            mod = importlib.import_module(module_name)
        except ImportError:
            msg = ('Module <b>{}</b> could not be imported.'.format(module_name))
            self.qteLogger.exception(msg, stack_info=True)
            return
        if hasattr(mod, 'install_macros_and_bindings'):
            # By convention, the module has an
            # install_macros_and_bindings method. If an error
            # occurs intercept it, but do not abort the method
            # since the error only relates to a failed attempt to
            # apply default key-bindings, not to register the
            # widget (the main purpose of this method).
            try:
                mod.install_macros_and_bindings(widgetObj)
            except Exception:
                msg = ('<b>install_macros_and_bindings</b> function'
                       ' in <b>{}</b> did not execute properly.')
                msg = msg.format(module_name)
                self.qteLogger.error(msg, stack_info=True)
        else:
            msg = ('Module <b>{}</b> has no '
                   '<b>install_macros_and_bindings</b>'
                   ' method'.format(module_name))
            self.qteLogger.error(msg)
    return widgetObj
Augment the standard Qt widgetObj with Qtmacs specific fields .
831
14
7,991
def qteSetAppletSignature ( self , signature : str ) : if '*' in signature : raise QtmacsOtherError ( 'The applet signature must not contain "*"' ) if signature == '' : raise QtmacsOtherError ( 'The applet signature must be non-empty' ) self . _qteAdmin . appletSignature = signature
Specify the applet signature .
79
7
7,992
def qteAutoremoveDeletedWidgets ( self ) : widget_list = self . _qteAdmin . widgetList deleted_widgets = [ _ for _ in widget_list if sip . isdeleted ( _ ) ] for widgetObj in deleted_widgets : self . _qteAdmin . widgetList . remove ( widgetObj )
Remove all widgets from the internal widget list that do not exist anymore according to SIP .
75
18
7,993
def qteSetWidgetFocusOrder ( self , widList : tuple ) : # A list with less than two entries cannot be re-ordered. if len ( widList ) < 2 : return # Housekeeping: remove non-existing widgets from the admin structure. self . qteAutoremoveDeletedWidgets ( ) # Remove all **None** widgets. widList = [ _ for _ in widList if _ is not None ] # Ensure that all widgets exist in the current applet. for wid in widList : if wid not in self . _qteAdmin . widgetList : msg = 'Cannot change focus order because some ' msg += 'widgets do not exist.' self . qteLogger . warning ( msg ) return # Remove all duplicates from the user supplied list. newList = [ widList [ 0 ] ] for wid in widList [ 1 : ] : if wid not in newList : newList . append ( wid ) # If the duplicate free list has only one entry then there is # nothing left to reorder. if len ( newList ) < 2 : return # The purpose of the code is the following: suppose # _qteAdmin.widgetList = [0,1,2,3,4,5] and newList=[2,5,1]. # Then change _qteAdmin.widgetList to [0,1,2,5,1,3,4]. Step # 1: remove all but the first widget in newList from # _qteAdmin.widgetList. for wid in newList [ 1 : ] : self . _qteAdmin . widgetList . remove ( wid ) # 2: re-insert the removed elements as a sequence again. startIdx = self . _qteAdmin . widgetList . index ( newList [ 0 ] ) + 1 for idx , wid in enumerate ( newList [ 1 : ] ) : self . _qteAdmin . widgetList . insert ( startIdx + idx , wid )
Change the focus order of the widgets in this applet .
423
12
7,994
def qteNextWidget(self, numSkip: int=1, ofsWidget: QtGui.QWidget=None,
                  skipVisible: bool=False, skipInvisible: bool=True,
                  skipFocusable: bool=False, skipUnfocusable: bool=True):
    """Return the widget ``numSkip`` positions after ``ofsWidget`` in
    cyclic order, honouring the visibility/focusability filters.

    Returns None when no widget survives the filters.
    """
    # Check type of input arguments.
    if not hasattr(ofsWidget, '_qteAdmin') and (ofsWidget is not None):
        msg = '<ofsWidget> was probably not added with <qteAddWidget>'
        msg += ' method because it lacks the <_qteAdmin> attribute.'
        raise QtmacsOtherError(msg)
    # Housekeeping: remove non-existing widgets from the admin structure.
    self.qteAutoremoveDeletedWidgets()
    # Make a copy of the widget list.
    widList = list(self._qteAdmin.widgetList)
    # Return immediately if the widget list is empty. The actual
    # return value is either self._qteActiveWidget (if it points
    # to a child widget of the current applet), or None.
    if not len(widList):
        if qteGetAppletFromWidget(self._qteActiveWidget) is self:
            return self._qteActiveWidget
        else:
            return None
    if skipInvisible:
        # Remove all invisible widgets.
        widList = [wid for wid in widList if wid.isVisible()]
    if skipVisible:
        # Remove all visible widgets.
        widList = [wid for wid in widList if not wid.isVisible()]
    if skipFocusable:
        # Remove all focusable widgets.
        widList = [wid for wid in widList if not wid._qteAdmin.isFocusable]
    if skipUnfocusable:
        # Remove all unfocusable widgets.
        widList = [wid for wid in widList if wid._qteAdmin.isFocusable]
    # Return immediately if the list is empty. This is typically
    # the case at startup before any applet has been added.
    if not len(widList):
        return None
    # If no offset widget was given then use the currently active one.
    if ofsWidget is None:
        ofsWidget = self._qteActiveWidget
    if (ofsWidget is not None) and (numSkip == 0):
        if qteIsQtmacsWidget(ofsWidget):
            return ofsWidget
    # Determine the index of the offset widget; assume it is zero
    # if the widget does not exist, eg. if the currently active
    # applet is not part of the pruned widList list.
    try:
        ofsIdx = widList.index(ofsWidget)
    except ValueError:
        ofsIdx = 0
    # Compute the index of the next widget and wrap around the
    # list if necessary.
    ofsIdx = (ofsIdx + numSkip) % len(widList)
    # Return the widget.
    return widList[ofsIdx]
Return the next widget in cyclic order .
626
9
7,995
def qteMakeWidgetActive ( self , widgetObj : QtGui . QWidget ) : # Void the active widget information. if widgetObj is None : self . _qteActiveWidget = None return # Ensure that this applet is an ancestor of ``widgetObj`` # inside the Qt hierarchy. if qteGetAppletFromWidget ( widgetObj ) is not self : msg = 'The specified widget is not inside the current applet.' raise QtmacsOtherError ( msg ) # If widgetObj is not registered with Qtmacs then simply declare # it active and return. if not hasattr ( widgetObj , '_qteAdmin' ) : self . _qteActiveWidget = widgetObj return # Do nothing if widgetObj refers to an applet. if widgetObj . _qteAdmin . isQtmacsApplet : self . _qteActiveWidget = None return # Housekeeping: remove non-existing widgets from the admin structure. self . qteAutoremoveDeletedWidgets ( ) # Verify the widget is registered for this applet. if widgetObj not in self . _qteAdmin . widgetList : msg = 'Widget is not registered for this applet.' self . qteLogger . error ( msg , stack_info = True ) self . _qteActiveWidget = None return # The focus manager in QtmacsMain will hand the focus to # whatever the _qteActiveWidget variable of the active applet # points to. self . qteSetWidgetFocusOrder ( ( self . _qteActiveWidget , widgetObj ) ) self . _qteActiveWidget = widgetObj
Give keyboard focus to widgetObj .
344
7
7,996
def split_key ( key ) : if key == KEY_SEP : return ( ) key_chunks = tuple ( key . strip ( KEY_SEP ) . split ( KEY_SEP ) ) if key_chunks [ 0 ] . startswith ( KEY_SEP ) : return ( key_chunks [ 0 ] [ len ( KEY_SEP ) : ] , ) + key_chunks [ 1 : ] else : return key_chunks
Splits a node key .
100
6
7,997
def set ( self , index , value = None , dir = False , ttl = None , expiration = None ) : if bool ( dir ) is ( value is not None ) : raise TypeError ( 'Choose one of value or directory' ) if ( ttl is not None ) is ( expiration is None ) : raise TypeError ( 'Both of ttl and expiration required' ) self . value = value if self . dir != dir : self . dir = dir self . nodes = { } if dir else None self . ttl = ttl self . expiration = expiration self . modified_index = index
Updates the node data .
126
6
7,998
def make_result(self, result_class, node=None, prev_node=None,
                remember=True, key_chunks=None, notify=True, **kwargs):
    """Make an etcd result of ``result_class`` for ``node``/``prev_node``.

    The result is built from canonicalized copies of the nodes and the
    current ``self.index``.  When ``remember`` is true, a light-weight
    copy (child nodes stripped) is stored in ``self.history`` and every
    prefix of the key is appended to ``self.indices``; when ``notify``
    is additionally true, waiters registered in ``self.events`` for the
    key or any of its prefixes are woken.
    """
    def canonicalize(node, **kwargs):
        # None passes through untouched.
        return None if node is None else node.canonicalize(**kwargs)
    index = self.index
    result = result_class(canonicalize(node, **kwargs),
                          canonicalize(prev_node, **kwargs), index)
    if not remember:
        return result
    # History keeps a node-less copy so it stays small.
    self.history[index] = result_class(
        canonicalize(node, include_nodes=False),
        canonicalize(prev_node, include_nodes=False), index)
    key_chunks = key_chunks or split_key(node.key)
    # Every prefix of the key, shortest first:
    # ('a',), ('a', 'b'), ..., key_chunks itself.
    asymptotic_key_chunks = (key_chunks[:x + 1]
                             for x in xrange(len(key_chunks)))
    event_keys = [(False, key_chunks)]
    for _key_chunks in asymptotic_key_chunks:
        # ``exact`` marks the entry that is the full key, not a prefix.
        exact = _key_chunks == key_chunks
        self.indices.setdefault(_key_chunks, []).append((index, exact))
        event_keys.append((True, _key_chunks))
    if notify:
        # Pop-and-set so each waiter fires at most once.
        for event_key in event_keys:
            try:
                event = self.events.pop(event_key)
            except KeyError:
                pass
            else:
                event.set()
    return result
Makes an etcd result .
338
7
7,999
def connect(self, host='localhost'):
    """Connect to the RabbitMQ server and set everything up.

    Opens a blocking pika connection to ``host``, declares the topic
    exchange ``self.exchange``, creates an exclusive queue, binds it
    either to every routing key (``'*'``) when ``self.listen_all`` is
    set or to each key in ``self.topics`` otherwise, and finally
    registers ``self._callback`` as the consumer with automatic acks.
    """
    # Connect
    get_logger().info("Connecting to RabbitMQ server...")
    self._conn = pika.BlockingConnection(
        pika.ConnectionParameters(host=host))
    self._channel = self._conn.channel()
    # Exchanger
    get_logger().info(
        "Declaring topic exchanger {}...".format(self.exchange))
    # NOTE(review): ``type=`` was renamed ``exchange_type=`` in pika 1.0;
    # this call assumes a pre-1.0 pika — confirm the pinned version.
    self._channel.exchange_declare(exchange=self.exchange, type='topic')
    # Create queue
    get_logger().info("Creating RabbitMQ queue...")
    # exclusive=True: server-named queue, deleted when we disconnect.
    result = self._channel.queue_declare(exclusive=True)
    self._queue_name = result.method.queue
    # Binding
    if self.listen_all:
        get_logger().info(
            "Binding queue to exchanger {} (listen all)...".format(
                self.exchange))
        self._channel.queue_bind(exchange=self.exchange,
                                 queue=self._queue_name,
                                 routing_key='*')
    else:
        for routing_key in self.topics:
            get_logger().info("Binding queue to exchanger {} "
                              "with routing key {}...".format(
                                  self.exchange, routing_key))
            self._channel.queue_bind(exchange=self.exchange,
                                     queue=self._queue_name,
                                     routing_key=routing_key)
    # Callback
    get_logger().info("Binding callback...")
    # no_ack=True: messages are considered delivered without explicit acks.
    self._channel.basic_consume(self._callback,
                                queue=self._queue_name,
                                no_ack=True)
Connect to the server and set everything up .
365
9