idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
21,400 | def meta ( * bases , ** kwargs ) : metaclass = kwargs . get ( "metaclass" , type ) if not bases : bases = ( object , ) class NewMeta ( type ) : def __new__ ( mcls , name , mbases , namespace ) : if name : return metaclass . __new__ ( metaclass , name , bases , namespace ) return super ( NewMeta , mcls ) . __new__ ( mcls , "" , mbases , { } ) return NewMeta ( "" , tuple ( ) , { } ) | Allows unique syntax similar to Python 3 for working with metaclasses in both Python 2 and Python 3 . |
21,401 | def attack ( a , d , s ) : if isinstance ( s , collections . Iterable ) : it_s = iter ( s ) s = next ( it_s ) else : it_s = None m_a = 1. / a m_d = ( s - 1. ) / d len_a = int ( a + .5 ) len_d = int ( d + .5 ) for sample in xrange ( len_a ) : yield sample * m_a for sample in xrange ( len_d ) : yield 1. + sample * m_d if it_s is None : while True : yield s else : for s in it_s : yield s | Linear ADS fading attack stream generator useful to be multiplied with a given stream . |
21,402 | def ones ( dur = None ) : if dur is None or ( isinf ( dur ) and dur > 0 ) : while True : yield 1.0 for x in xrange ( int ( .5 + dur ) ) : yield 1.0 | Ones stream generator . You may multiply your endless stream by this to enforce an end to it . |
21,403 | def adsr ( dur , a , d , s , r ) : m_a = 1. / a m_d = ( s - 1. ) / d m_r = - s * 1. / r len_a = int ( a + .5 ) len_d = int ( d + .5 ) len_r = int ( r + .5 ) len_s = int ( dur + .5 ) - len_a - len_d - len_r for sample in xrange ( len_a ) : yield sample * m_a for sample in xrange ( len_d ) : yield 1. + sample * m_d for sample in xrange ( len_s ) : yield s for sample in xrange ( len_r ) : yield s + sample * m_r | Linear ADSR envelope . |
21,404 | def white_noise ( dur = None , low = - 1. , high = 1. ) : if dur is None or ( isinf ( dur ) and dur > 0 ) : while True : yield random . uniform ( low , high ) for x in xrange ( rint ( dur ) ) : yield random . uniform ( low , high ) | White noise stream generator . |
21,405 | def sinusoid ( freq , phase = 0. ) : for n in modulo_counter ( start = phase , modulo = 2 * pi , step = freq ) : yield sin ( n ) | Sinusoid based on the optimized math . sin |
21,406 | def impulse ( dur = None , one = 1. , zero = 0. ) : if dur is None or ( isinf ( dur ) and dur > 0 ) : yield one while True : yield zero elif dur >= .5 : num_samples = int ( dur - .5 ) yield one for x in xrange ( num_samples ) : yield zero | Impulse stream generator . |
21,407 | def karplus_strong ( freq , tau = 2e4 , memory = white_noise ) : return comb . tau ( 2 * pi / freq , tau ) . linearize ( ) ( zeros ( ) , memory = memory ) | Karplus - Strong digitar synthesis algorithm . |
21,408 | def normalize ( self ) : max_abs = max ( self . table , key = abs ) if max_abs == 0 : raise ValueError ( "Can't normalize zeros" ) return self / max_abs | Returns a new table with values ranging from - 1 to 1 reaching at least one of these unless there s no data . |
21,409 | def strategy ( self , * names , ** kwargs ) : def decorator ( func ) : keep_name = kwargs . pop ( "keep_name" , False ) if kwargs : key = next ( iter ( kwargs ) ) raise TypeError ( "Unknown keyword argument '{}'" . format ( key ) ) if not keep_name : func . __name__ = str ( names [ 0 ] ) self [ names ] = func return self return decorator | StrategyDict wrapping method for adding a new strategy . |
21,410 | def call_sphinx ( out_type , build_dir = "build" ) : sphinx_string = sphinx_template . format ( build_dir = build_dir , out_type = out_type ) if sphinx . main ( shlex . split ( sphinx_string ) ) != 0 : raise RuntimeError ( "Something went wrong while building '{0}'" . format ( out_type ) ) if out_type in make_target : make_string = make_template . format ( build_dir = build_dir , out_type = out_type , make_param = make_target [ out_type ] ) call ( shlex . split ( make_string ) ) | Call the sphinx - build for the given output type and the make when the target has this possibility . |
21,411 | def levinson_durbin ( acdata , order = None ) : if order is None : order = len ( acdata ) - 1 elif order >= len ( acdata ) : acdata = Stream ( acdata ) . append ( 0 ) . take ( order + 1 ) def inner ( a , b ) : return sum ( acdata [ abs ( i - j ) ] * ai * bj for i , ai in enumerate ( a . numlist ) for j , bj in enumerate ( b . numlist ) ) try : A = ZFilter ( 1 ) for m in xrange ( 1 , order + 1 ) : B = A ( 1 / z ) * z ** - m A -= inner ( A , z ** - m ) / inner ( B , B ) * B except ZeroDivisionError : raise ParCorError ( "Can't find next PARCOR coefficient" ) A . error = inner ( A , A ) return A | Solve the Yule - Walker linear system of equations . |
21,412 | def should_skip ( app , what , name , obj , skip , options ) : if name in [ "__doc__" , "__module__" , "__dict__" , "__weakref__" , "__abstractmethods__" ] or name . startswith ( "_abc_" ) : return True return False | Callback object chooser function for docstring documentation . |
21,413 | def setup ( app ) : app . connect ( 'autodoc-process-docstring' , lambda * args : pre_processor ( * args , namer = audiolazy_namer ) ) app . connect ( 'autodoc-skip-member' , should_skip ) | Just connects the docstring pre_processor and should_skip functions to be applied on all docstrings . |
21,414 | def newest_file ( file_iterable ) : return max ( file_iterable , key = lambda fname : os . path . getmtime ( fname ) ) | Returns the name of the newest file given an iterable of file names . |
21,415 | def overlap_correlation ( wnd , hop ) : return sum ( wnd * Stream ( wnd ) . skip ( hop ) ) / sum ( el ** 2 for el in wnd ) | Overlap correlation percent for the given overlap hop in samples . |
21,416 | def scalloping_loss ( wnd ) : return - dB20 ( abs ( sum ( wnd * cexp ( line ( len ( wnd ) , 0 , - 1j * pi ) ) ) ) / sum ( wnd ) ) | Positive number with the scalloping loss in dB . |
21,417 | def find_xdb_bin ( wnd , power = .5 , res = 1500 ) : spectrum = dB20 ( rfft ( wnd , res * len ( wnd ) ) ) root_at_xdb = spectrum - spectrum [ 0 ] - dB10 ( power ) return next ( i for i , el in enumerate ( zcross ( root_at_xdb ) ) if el ) / res | A not so fast way to find the x - dB cutoff frequency bin index . |
21,418 | def rint ( x , step = 1 ) : div , mod = divmod ( x , step ) err = min ( step / 10. , .1 ) result = div * step if x > 0 : result += err elif x < 0 : result -= err if ( operator . ge if x >= 0 else operator . gt ) ( 2 * mod , step ) : result += step return int ( result ) | Round to integer . |
21,419 | def blocks ( seq , size = None , hop = None , padval = 0. ) : res = deque ( maxlen = size ) idx = 0 last_idx = size - 1 if hop is None : hop = size reinit_idx = size - hop if hop <= size : for el in seq : res . append ( el ) if idx == last_idx : yield res idx = reinit_idx else : idx += 1 else : for el in seq : if idx < 0 : idx += 1 else : res . append ( el ) if idx == last_idx : yield res idx = size - hop else : idx += 1 if idx > max ( size - hop , 0 ) : for _ in xrange ( idx , size ) : res . append ( padval ) yield res | General iterable blockenizer . |
21,420 | def elementwise ( name = "" , pos = None ) : if ( name == "" ) and ( pos is None ) : pos = 0 def elementwise_decorator ( func ) : @ wraps ( func ) def wrapper ( * args , ** kwargs ) : positional = ( pos is not None ) and ( pos < len ( args ) ) arg = args [ pos ] if positional else kwargs [ name ] if isinstance ( arg , Iterable ) and not isinstance ( arg , STR_TYPES ) : if positional : data = ( func ( * ( args [ : pos ] + ( x , ) + args [ pos + 1 : ] ) , ** kwargs ) for x in arg ) else : data = ( func ( * args , ** dict ( it . chain ( iteritems ( kwargs ) , [ ( name , x ) ] ) ) ) for x in arg ) if isinstance ( arg , SOME_GEN_TYPES ) : return data type_arg = type ( arg ) try : is_numpy = type_arg . __module__ == "numpy" except AttributeError : is_numpy = False if is_numpy : np_type = { "ndarray" : sys . modules [ "numpy" ] . array , "matrix" : sys . modules [ "numpy" ] . mat } [ type_arg . __name__ ] return np_type ( list ( data ) ) from . lazy_stream import Stream if issubclass ( type_arg , Stream ) : return Stream ( data ) return type_arg ( data ) return func ( * args , ** kwargs ) return wrapper return elementwise_decorator | Function auto - map decorator broadcaster . |
21,421 | def almost_eq ( a , b , bits = 32 , tol = 1 , ignore_type = True , pad = 0. ) : if not ( ignore_type or type ( a ) == type ( b ) ) : return False is_it_a = isinstance ( a , Iterable ) is_it_b = isinstance ( b , Iterable ) if is_it_a != is_it_b : return False if is_it_a : return all ( almost_eq . bits ( ai , bi , bits , tol , ignore_type ) for ai , bi in xzip_longest ( a , b , fillvalue = pad ) ) significand = { 32 : 23 , 64 : 52 , 80 : 63 , 128 : 112 } [ bits ] power = tol - significand - 1 return abs ( a - b ) <= 2 ** power * abs ( a + b ) | Almost equal based on the amount of floating point significand bits . |
21,422 | def cached ( func ) : class Cache ( dict ) : def __missing__ ( self , key ) : result = self [ key ] = func ( * key ) return result cache = Cache ( ) f = wraps ( func ) ( lambda * key : cache [ key ] ) f . cache = cache return f | Cache decorator for a function without keyword arguments |
21,423 | def find_dates ( text , source = False , index = False , strict = False , base_date = None ) : date_finder = DateFinder ( base_date = base_date ) return date_finder . find_dates ( text , source = source , index = index , strict = strict ) | Extract datetime strings from text |
21,424 | def _add_tzinfo ( self , datetime_obj , tz_string ) : if datetime_obj is None : return None tzinfo_match = tz . gettz ( tz_string ) return datetime_obj . replace ( tzinfo = tzinfo_match ) | take a naive datetime and add dateutil . tz . tzinfo object |
21,425 | def check_style ( value ) : if sys . version_info [ : 2 ] >= ( 3 , 2 ) : if value not in FORMAT_STYLE_PATTERNS : msg = "Unsupported logging format style! (%r)" raise ValueError ( format ( msg , value ) ) elif value != DEFAULT_FORMAT_STYLE : msg = "Format string styles other than %r require Python 3.2+!" raise ValueError ( msg , DEFAULT_FORMAT_STYLE ) return value | Validate a logging format style . |
21,426 | def increase_verbosity ( ) : defined_levels = sorted ( set ( find_defined_levels ( ) . values ( ) ) ) current_index = defined_levels . index ( get_level ( ) ) selected_index = max ( 0 , current_index - 1 ) set_level ( defined_levels [ selected_index ] ) | Increase the verbosity of the root handler by one defined level . |
21,427 | def decrease_verbosity ( ) : defined_levels = sorted ( set ( find_defined_levels ( ) . values ( ) ) ) current_index = defined_levels . index ( get_level ( ) ) selected_index = min ( current_index + 1 , len ( defined_levels ) - 1 ) set_level ( defined_levels [ selected_index ] ) | Decrease the verbosity of the root handler by one defined level . |
21,428 | def get_level ( ) : handler , logger = find_handler ( logging . getLogger ( ) , match_stream_handler ) return handler . level if handler else DEFAULT_LOG_LEVEL | Get the logging level of the root handler . |
21,429 | def set_level ( level ) : handler , logger = find_handler ( logging . getLogger ( ) , match_stream_handler ) if handler and logger : handler . setLevel ( level_to_number ( level ) ) adjust_level ( logger , level ) else : install ( level = level ) | Set the logging level of the root handler . |
21,430 | def adjust_level ( logger , level ) : level = level_to_number ( level ) if logger . getEffectiveLevel ( ) > level : logger . setLevel ( level ) | Increase a logger s verbosity up to the requested level . |
21,431 | def find_defined_levels ( ) : defined_levels = { } for name in dir ( logging ) : if name . isupper ( ) : value = getattr ( logging , name ) if isinstance ( value , int ) : defined_levels [ name ] = value return defined_levels | Find the defined logging levels . |
21,432 | def level_to_number ( value ) : if is_string ( value ) : try : defined_levels = find_defined_levels ( ) value = defined_levels [ value . upper ( ) ] except KeyError : value = DEFAULT_LOG_LEVEL return value | Coerce a logging level name to a number . |
21,433 | def find_level_aliases ( ) : mapping = collections . defaultdict ( list ) for name , value in find_defined_levels ( ) . items ( ) : mapping [ value ] . append ( name ) aliases = { } for value , names in mapping . items ( ) : if len ( names ) > 1 : names = sorted ( names , key = lambda n : len ( n ) ) canonical_name = names . pop ( ) for alias in names : aliases [ alias ] = canonical_name return aliases | Find log level names which are aliases of each other . |
21,434 | def parse_encoded_styles ( text , normalize_key = None ) : parsed_styles = { } for assignment in split ( text , ';' ) : name , _ , styles = assignment . partition ( '=' ) target = parsed_styles . setdefault ( name , { } ) for token in split ( styles , ',' ) : if token . isdigit ( ) : target [ 'color' ] = int ( token ) elif token in ANSI_COLOR_CODES : target [ 'color' ] = token elif '=' in token : name , _ , value = token . partition ( '=' ) if name in ( 'color' , 'background' ) : if value . isdigit ( ) : target [ name ] = int ( value ) elif value in ANSI_COLOR_CODES : target [ name ] = value else : target [ token ] = True return parsed_styles | Parse text styles encoded in a string into a nested data structure . |
21,435 | def find_hostname ( use_chroot = True ) : for chroot_file in CHROOT_FILES : try : with open ( chroot_file ) as handle : first_line = next ( handle ) name = first_line . strip ( ) if name : return name except Exception : pass return socket . gethostname ( ) | Find the host name to include in log messages . |
21,436 | def find_program_name ( ) : return ( ( os . path . basename ( sys . argv [ 0 ] ) if sys . argv and sys . argv [ 0 ] != '-c' else '' ) or ( os . path . basename ( sys . executable ) if sys . executable else '' ) or 'python' ) | Select a suitable program name to embed in log messages . |
21,437 | def replace_handler ( logger , match_handler , reconfigure ) : handler , other_logger = find_handler ( logger , match_handler ) if handler and other_logger and reconfigure : other_logger . removeHandler ( handler ) logger = other_logger return handler , logger | Prepare to replace a handler . |
21,438 | def walk_propagation_tree ( logger ) : while isinstance ( logger , logging . Logger ) : yield logger if logger . propagate : logger = getattr ( logger , 'parent' , None ) else : logger = None | Walk through the propagation hierarchy of the given logger . |
21,439 | def colorize_format ( self , fmt , style = DEFAULT_FORMAT_STYLE ) : result = [ ] parser = FormatStringParser ( style = style ) for group in parser . get_grouped_pairs ( fmt ) : applicable_styles = [ self . nn . get ( self . field_styles , token . name ) for token in group if token . name ] if sum ( map ( bool , applicable_styles ) ) == 1 : result . append ( ansi_wrap ( '' . join ( token . text for token in group ) , ** next ( s for s in applicable_styles if s ) ) ) else : for token in group : text = token . text if token . name : field_styles = self . nn . get ( self . field_styles , token . name ) if field_styles : text = ansi_wrap ( text , ** field_styles ) result . append ( text ) return '' . join ( result ) | Rewrite a logging format string to inject ANSI escape sequences . |
21,440 | def format ( self , record ) : style = self . nn . get ( self . level_styles , record . levelname ) if style and Empty is not None : copy = Empty ( ) copy . __class__ = ( self . log_record_factory ( ) if self . log_record_factory is not None else logging . LogRecord ) copy . __dict__ . update ( record . __dict__ ) copy . msg = ansi_wrap ( coerce_string ( record . msg ) , ** style ) record = copy return logging . Formatter . format ( self , record ) | Apply level - specific styling to log records . |
21,441 | def get_pairs ( self , format_string ) : for token in self . get_tokens ( format_string ) : match = self . name_pattern . search ( token ) name = match . group ( 1 ) if match else None yield FormatStringToken ( name = name , text = token ) | Tokenize a logging format string and extract field names from tokens . |
21,442 | def get_pattern ( self , field_name ) : return re . compile ( self . raw_pattern . replace ( r'\w+' , field_name ) , re . VERBOSE ) | Get a regular expression to match a formatting directive that references the given field name . |
21,443 | def get_tokens ( self , format_string ) : return [ t for t in self . tokenize_pattern . split ( format_string ) if t ] | Tokenize a logging format string . |
21,444 | def normalize_name ( self , name ) : name = name . lower ( ) if name in self . aliases : name = self . aliases [ name ] return name | Normalize a field or level name . |
21,445 | def main ( ) : actions = [ ] try : options , arguments = getopt . getopt ( sys . argv [ 1 : ] , 'cdh' , [ 'convert' , 'to-html' , 'demo' , 'help' , ] ) for option , value in options : if option in ( '-c' , '--convert' , '--to-html' ) : actions . append ( functools . partial ( convert_command_output , * arguments ) ) arguments = [ ] elif option in ( '-d' , '--demo' ) : actions . append ( demonstrate_colored_logging ) elif option in ( '-h' , '--help' ) : usage ( __doc__ ) return else : assert False , "Programming error: Unhandled option!" if not actions : usage ( __doc__ ) return except Exception as e : warning ( "Error: %s" , e ) sys . exit ( 1 ) for function in actions : function ( ) | Command line interface for the coloredlogs program . |
21,446 | def capture ( command , encoding = 'UTF-8' ) : with open ( os . devnull , 'wb' ) as dev_null : command_line = [ 'script' , '-qc' , ' ' . join ( map ( pipes . quote , command ) ) , '/dev/null' ] script = subprocess . Popen ( command_line , stdout = subprocess . PIPE , stderr = dev_null ) stdout , stderr = script . communicate ( ) if script . returncode == 0 : output = stdout . decode ( encoding ) else : fd , temporary_file = tempfile . mkstemp ( prefix = 'coloredlogs-' , suffix = '-capture.txt' ) try : command_line = [ 'script' , '-q' , temporary_file ] + list ( command ) subprocess . Popen ( command_line , stdout = dev_null , stderr = dev_null ) . wait ( ) with codecs . open ( temporary_file , 'r' , encoding ) as handle : output = handle . read ( ) finally : os . unlink ( temporary_file ) if output . startswith ( b'^D' ) : output = output [ 2 : ] return u'\n' . join ( clean_terminal_output ( output ) ) | Capture the output of an external command as if it runs in an interactive terminal . |
21,447 | def convert ( text , code = True , tabsize = 4 ) : output = [ ] in_span = False compatible_text_styles = { ANSI_TEXT_STYLES [ 'bold' ] : { 'font-weight' : 'bold' } , ANSI_TEXT_STYLES [ 'strike_through' ] : { 'text-decoration' : 'line-through' } , ANSI_TEXT_STYLES [ 'underline' ] : { 'text-decoration' : 'underline' } , } for token in TOKEN_PATTERN . split ( text ) : if token . startswith ( ( 'http://' , 'https://' , 'www.' ) ) : url = token if '://' in token else ( 'http://' + token ) token = u'<a href="%s" style="color:inherit">%s</a>' % ( html_encode ( url ) , html_encode ( token ) ) elif token . startswith ( ANSI_CSI ) : ansi_codes = token [ len ( ANSI_CSI ) : - 1 ] . split ( ';' ) if all ( c . isdigit ( ) for c in ansi_codes ) : ansi_codes = list ( map ( int , ansi_codes ) ) if in_span and ( 0 in ansi_codes or not ansi_codes ) : output . append ( '</span>' ) in_span = False styles = { } is_faint = ( ANSI_TEXT_STYLES [ 'faint' ] in ansi_codes ) is_inverse = ( ANSI_TEXT_STYLES [ 'inverse' ] in ansi_codes ) while ansi_codes : number = ansi_codes . pop ( 0 ) if number in compatible_text_styles : styles . update ( compatible_text_styles [ number ] ) continue text_color = None background_color = None if 30 <= number <= 37 : text_color = EIGHT_COLOR_PALETTE [ number - 30 ] elif 40 <= number <= 47 : background_color = EIGHT_COLOR_PALETTE [ number - 40 ] elif 90 <= number <= 97 : text_color = BRIGHT_COLOR_PALETTE [ number - 90 ] elif 100 <= number <= 107 : background_color = BRIGHT_COLOR_PALETTE [ number - 100 ] elif number in ( 38 , 39 ) and len ( ansi_codes ) >= 2 and ansi_codes [ 0 ] == 5 : try : ansi_codes . pop ( 0 ) color_index = ansi_codes . 
pop ( 0 ) if number == 38 : text_color = EXTENDED_COLOR_PALETTE [ color_index ] elif number == 39 : background_color = EXTENDED_COLOR_PALETTE [ color_index ] except ( ValueError , IndexError ) : pass if text_color and is_inverse : background_color = text_color text_color = select_text_color ( * parse_hex_color ( text_color ) ) if text_color and is_faint : text_color = '#%02X%02X%02X' % tuple ( max ( 0 , n - 40 ) for n in parse_hex_color ( text_color ) ) if text_color : styles [ 'color' ] = text_color if background_color : styles [ 'background-color' ] = background_color if styles : token = '<span style="%s">' % ';' . join ( k + ':' + v for k , v in sorted ( styles . items ( ) ) ) in_span = True else : token = '' else : token = html_encode ( token ) output . append ( token ) html = '' . join ( output ) html = encode_whitespace ( html , tabsize ) if code : html = '<code>%s</code>' % html return html | Convert text with ANSI escape sequences to HTML . |
21,448 | def encode_whitespace ( text , tabsize = 4 ) : text = text . replace ( '\r\n' , '\n' ) text = text . replace ( '\n' , '<br>\n' ) text = text . expandtabs ( tabsize ) text = re . sub ( INDENT_PATTERN , encode_whitespace_cb , text ) text = re . sub ( TAG_INDENT_PATTERN , r'\1 ' , text ) text = re . sub ( ' {2,}' , encode_whitespace_cb , text ) return text | Encode whitespace so that web browsers properly render it . |
21,449 | def html_encode ( text ) : text = text . replace ( '&' , '&' ) text = text . replace ( '<' , '<' ) text = text . replace ( '>' , '>' ) text = text . replace ( '"' , '"' ) return text | Encode characters with a special meaning as HTML . |
21,450 | def parse_hex_color ( value ) : if value . startswith ( '#' ) : value = value [ 1 : ] if len ( value ) == 3 : return ( int ( value [ 0 ] * 2 , 16 ) , int ( value [ 1 ] * 2 , 16 ) , int ( value [ 2 ] * 2 , 16 ) , ) elif len ( value ) == 6 : return ( int ( value [ 0 : 2 ] , 16 ) , int ( value [ 2 : 4 ] , 16 ) , int ( value [ 4 : 6 ] , 16 ) , ) else : raise ValueError ( ) | Convert a CSS color in hexadecimal notation into its R G B components . |
21,451 | def find_syslog_address ( ) : if sys . platform == 'darwin' and os . path . exists ( LOG_DEVICE_MACOSX ) : return LOG_DEVICE_MACOSX elif os . path . exists ( LOG_DEVICE_UNIX ) : return LOG_DEVICE_UNIX else : return 'localhost' , logging . handlers . SYSLOG_UDP_PORT | Find the most suitable destination for system log messages . |
21,452 | def generate_screenshots ( ) : this_script = os . path . abspath ( __file__ ) this_directory = os . path . dirname ( this_script ) repository = os . path . join ( this_directory , os . pardir ) examples_directory = os . path . join ( repository , 'docs' , 'examples' ) images_directory = os . path . join ( repository , 'docs' , 'images' ) for shell_script in sorted ( glob . glob ( os . path . join ( examples_directory , '*.sh' ) ) ) : basename , extension = os . path . splitext ( os . path . basename ( shell_script ) ) image_file = os . path . join ( images_directory , '%s.png' % basename ) logger . info ( "Generating %s by running %s .." , format_path ( image_file ) , format_path ( shell_script ) ) command_line = [ sys . executable , __file__ , shell_script ] random_title = random_string ( 25 ) urxvt_command = [ 'urxvt' , '-geometry' , '98x30' , '-fg' , TEXT_COLOR , '-bg' , BACKGROUND_COLOR , '-fn' , 'xft:%s:pixelsize=%i' % ( FONT_NAME , FONT_SIZE ) , '-title' , random_title , '+sb' , ] if which ( 'qtile-run' ) : urxvt_command . insert ( 0 , 'qtile-run' ) urxvt_command . insert ( 1 , '-f' ) for index , css_color in enumerate ( EIGHT_COLOR_PALETTE ) : urxvt_command . extend ( ( '--color%i' % index , css_color ) ) urxvt_command . extend ( ( '-e' , 'sh' , '-c' , 'setterm -cursor off; %s' % quote ( command_line ) ) ) execute ( * urxvt_command , async = True ) try : time . sleep ( 10 ) execute ( 'import' , '-window' , random_title , image_file ) execute ( 'convert' , image_file , '-trim' , '-bordercolor' , BACKGROUND_COLOR , '-border' , '5' , image_file ) finally : execute ( 'wmctrl' , '-c' , random_title ) | Generate screenshots from shell scripts . |
21,453 | def interpret_script ( shell_script ) : with CaptureOutput ( ) as capturer : shell = subprocess . Popen ( [ 'bash' , '-' ] , stdin = subprocess . PIPE ) with open ( shell_script ) as handle : for line in handle : sys . stdout . write ( ansi_wrap ( '$' , color = 'green' ) + ' ' + line ) sys . stdout . flush ( ) shell . stdin . write ( line ) shell . stdin . flush ( ) shell . stdin . close ( ) time . sleep ( 12 ) captured_output = capturer . get_text ( ) filename , extension = os . path . splitext ( shell_script ) transcript_file = '%s.txt' % filename logger . info ( "Updating %s .." , format_path ( transcript_file ) ) with open ( transcript_file , 'w' ) as handle : handle . write ( ansi_strip ( captured_output ) ) | Make it appear as if commands are typed into the terminal . |
21,454 | def get_version ( * args ) : contents = get_contents ( * args ) metadata = dict ( re . findall ( '__([a-z]+)__ = [\'"]([^\'"]+)' , contents ) ) return metadata [ 'version' ] | Extract the version number from a Python module . |
21,455 | def have_environment_marker_support ( ) : try : from pkg_resources import parse_version from setuptools import __version__ return parse_version ( __version__ ) >= parse_version ( '0.7.2' ) except Exception : return False | Check whether setuptools has support for PEP - 426 environment marker support . |
21,456 | def _log_http_event ( response ) : logger . info ( '{ip} - - "{method} {path} {status_code}"' . format ( ip = request . remote_addr , method = request . method , path = request . path , status_code = response . status_code ) ) return response | It will create a log event as werkzeug but at the end of request holding the request - id |
21,457 | def build_valid_time ( time , step ) : step_s = step * 3600 if len ( time . shape ) == 0 and len ( step . shape ) == 0 : data = time + step_s dims = ( ) elif len ( time . shape ) > 0 and len ( step . shape ) == 0 : data = time + step_s dims = ( 'time' , ) elif len ( time . shape ) == 0 and len ( step . shape ) > 0 : data = time + step_s dims = ( 'step' , ) else : data = time [ : , None ] + step_s [ None , : ] dims = ( 'time' , 'step' ) return dims , data | Return dimensions and data of the valid_time corresponding to the given time and step . The data is seconds from the same epoch as time and may have one or two dimensions . |
21,458 | def open_file ( path , grib_errors = 'warn' , ** kwargs ) : if 'mode' in kwargs : warnings . warn ( "the `mode` keyword argument is ignored and deprecated" , FutureWarning ) kwargs . pop ( 'mode' ) stream = messages . FileStream ( path , message_class = cfmessage . CfMessage , errors = grib_errors ) return Dataset ( * build_dataset_components ( stream , ** kwargs ) ) | Open a GRIB file as a cfgrib . Dataset . |
21,459 | def open_dataset ( path , ** kwargs ) : if 'engine' in kwargs and kwargs [ 'engine' ] != 'cfgrib' : raise ValueError ( "only engine=='cfgrib' is supported" ) kwargs [ 'engine' ] = 'cfgrib' return xr . backends . api . open_dataset ( path , ** kwargs ) | Return a xr . Dataset with the requested backend_kwargs from a GRIB file . |
21,460 | def open_datasets ( path , backend_kwargs = { } , no_warn = False , ** kwargs ) : if not no_warn : warnings . warn ( "open_datasets is an experimental API, DO NOT RELY ON IT!" , FutureWarning ) fbks = [ ] datasets = [ ] try : datasets . append ( open_dataset ( path , backend_kwargs = backend_kwargs , ** kwargs ) ) except DatasetBuildError as ex : fbks . extend ( ex . args [ 2 ] ) for fbk in fbks : bks = backend_kwargs . copy ( ) bks [ 'filter_by_keys' ] = fbk datasets . extend ( open_datasets ( path , backend_kwargs = bks , no_warn = True , ** kwargs ) ) return datasets | Open a GRIB file groupping incompatible hypercubes to different datasets via simple heuristics . |
21,461 | def codes_get_size ( handle , key ) : size = ffi . new ( 'size_t *' ) _codes_get_size ( handle , key . encode ( ENC ) , size ) return size [ 0 ] | Get the number of coded value from a key . If several keys of the same name are present the total sum is returned . |
21,462 | def codes_get_string_length ( handle , key ) : size = ffi . new ( 'size_t *' ) _codes_get_length ( handle , key . encode ( ENC ) , size ) return size [ 0 ] | Get the length of the string representation of the key . If several keys of the same name are present the maximum length is returned . |
21,463 | def codes_get_bytes_array ( handle , key , size ) : values = ffi . new ( 'unsigned char[]' , size ) size_p = ffi . new ( 'size_t *' , size ) _codes_get_bytes ( handle , key . encode ( ENC ) , values , size_p ) return list ( values ) | Get unsigned chars array values from a key . |
21,464 | def codes_get_long_array ( handle , key , size ) : values = ffi . new ( 'long[]' , size ) size_p = ffi . new ( 'size_t *' , size ) _codes_get_long_array ( handle , key . encode ( ENC ) , values , size_p ) return list ( values ) | Get long array values from a key . |
21,465 | def codes_get_double_array ( handle , key , size ) : values = ffi . new ( 'double[]' , size ) size_p = ffi . new ( 'size_t *' , size ) _codes_get_double_array ( handle , key . encode ( ENC ) , values , size_p ) return list ( values ) | Get double array values from a key . |
21,466 | def codes_get_string_array ( handle , key , size , length = None ) : if length is None : length = codes_get_string_length ( handle , key ) values_keepalive = [ ffi . new ( 'char[]' , length ) for _ in range ( size ) ] values = ffi . new ( 'char*[]' , values_keepalive ) size_p = ffi . new ( 'size_t *' , size ) _codes_get_string_array ( handle , key . encode ( ENC ) , values , size_p ) return [ ffi . string ( values [ i ] ) . decode ( ENC ) for i in range ( size_p [ 0 ] ) ] | Get string array values from a key . |
21,467 | def codes_get_string ( handle , key , length = None ) : if length is None : length = codes_get_string_length ( handle , key ) values = ffi . new ( 'char[]' , length ) length_p = ffi . new ( 'size_t *' , length ) _codes_get_string = check_return ( lib . codes_get_string ) _codes_get_string ( handle , key . encode ( ENC ) , values , length_p ) return ffi . string ( values , length_p [ 0 ] ) . decode ( ENC ) | Get string element from a key . It may or may not fail in case there are more than one key in a message . Outputs the last element . |
21,468 | def codes_get_api_version ( ) : ver = lib . codes_get_api_version ( ) patch = ver % 100 ver = ver // 100 minor = ver % 100 major = ver // 100 return "%d.%d.%d" % ( major , minor , patch ) | Get the API version . |
21,469 | def codes_write ( handle , outfile ) : mess = ffi . new ( 'const void **' ) mess_len = ffi . new ( 'size_t*' ) codes_get_message = check_return ( lib . codes_get_message ) codes_get_message ( handle , mess , mess_len ) message = ffi . buffer ( mess [ 0 ] , size = mess_len [ 0 ] ) outfile . write ( message ) | Write a coded message to a file . If the file does not exist it is created . |
21,470 | def update ( self , sequence ) : item_index = None try : for item in sequence : item_index = self . add ( item ) except TypeError : raise ValueError ( "Argument needs to be an iterable, got %s" % type ( sequence ) ) return item_index | Update the set with the given iterable sequence then return the index of the last element inserted . |
21,471 | def pop ( self ) : if not self . items : raise KeyError ( "Set is empty" ) elem = self . items [ - 1 ] del self . items [ - 1 ] del self . map [ elem ] return elem | Remove and return the last element from the set . |
21,472 | def union ( self , * sets ) : cls = self . __class__ if isinstance ( self , OrderedSet ) else OrderedSet containers = map ( list , it . chain ( [ self ] , sets ) ) items = it . chain . from_iterable ( containers ) return cls ( items ) | Combines all unique items . Each items order is defined by its first appearance . |
def intersection(self, *sets):
    """Return a new set of elements common to this set and all of *sets*.

    Ordering follows this (the first) set only.
    """
    cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
    if not sets:
        return cls(self)
    shared = set.intersection(*map(set, sets))
    return cls(item for item in self if item in shared)
def difference(self, *sets):
    """Return a new set with all elements of this set not found in *sets*."""
    cls = self.__class__
    if not sets:
        return cls(self)
    excluded = set.union(*map(set, sets))
    return cls(item for item in self if item not in excluded)
def issubset(self, other):
    """Report whether *other* contains every element of this set."""
    # A larger collection can never be a subset of a smaller one.
    return len(self) <= len(other) and all(item in other for item in self)
def issuperset(self, other):
    """Report whether this set contains every element of *other*."""
    # A smaller collection can never be a superset of a larger one.
    return len(self) >= len(other) and all(item in self for item in other)
def symmetric_difference(self, other):
    """Return a new OrderedSet of elements in exactly one of the two sets."""
    cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
    only_here = cls(self).difference(other)
    only_there = cls(other).difference(self)
    return only_here.union(only_there)
21,478 | def _update_items ( self , items ) : self . items = items self . map = { item : idx for ( idx , item ) in enumerate ( items ) } | Replace the items list of this OrderedSet with a new one updating self . map accordingly . |
def difference_update(self, *sets):
    """Remove from this set every item that appears in any of *sets*."""
    doomed = set()
    for other in sets:
        doomed.update(other)
    self._update_items([item for item in self.items if item not in doomed])
def intersection_update(self, other):
    """Keep only the items also present in *other*, preserving this set's order."""
    keep = set(other)
    self._update_items([item for item in self.items if item in keep])
def symmetric_difference_update(self, other):
    """Turn this set into the symmetric difference of itself and *other*.

    Items unique to *other* are appended after this set's surviving items.
    """
    # Collect additions first, while membership still reflects the old state.
    newcomers = [item for item in other if item not in self]
    overlap = set(other)
    survivors = [item for item in self.items if item not in overlap]
    self._update_items(survivors + newcomers)
def validate_response(self):
    """Raise the specific push-error exception for this response.

    Does nothing when the response reports success; otherwise maps the
    server-provided error code to a dedicated exception type, falling
    back to the generic PushResponseError.
    """
    if self.is_success():
        return
    details = self.details
    if details:
        error = details.get('error', None)
        if error == PushResponse.ERROR_DEVICE_NOT_REGISTERED:
            raise DeviceNotRegisteredError(self)
        if error == PushResponse.ERROR_MESSAGE_TOO_BIG:
            raise MessageTooBigError(self)
        if error == PushResponse.ERROR_MESSAGE_RATE_EXCEEDED:
            raise MessageRateExceededError(self)
    raise PushResponseError(self)
def is_exponent_push_token(cls, token):
    """Return True when *token* looks like an Exponent push token."""
    import six
    is_text = isinstance(token, six.string_types)
    return is_text and token.startswith('ExponentPushToken')
def _publish_internal(self, push_messages):
    """Send push notifications.

    POSTs the payloads of *push_messages* to the Expo push endpoint in a
    single JSON request and returns one PushResponse per message, in the
    same order.

    Raises PushServerError when the response is not JSON, lists errors,
    lacks a 'data' key, or returns a receipt count different from the
    request; plain HTTP failures surface via raise_for_status().
    """
    import requests
    response = requests.post(
        self.host + self.api_url + '/push/send',
        data=json.dumps([pm.get_payload() for pm in push_messages]),
        headers={
            'accept': 'application/json',
            'accept-encoding': 'gzip, deflate',
            'content-type': 'application/json',
        })
    # Even error responses should carry a JSON body; if not, prefer the
    # HTTP error, then fall back to a generic server error.
    try:
        response_data = response.json()
    except ValueError:
        response.raise_for_status()
        raise PushServerError('Invalid server response', response)
    if 'errors' in response_data:
        raise PushServerError(
            'Request failed',
            response,
            response_data=response_data,
            errors=response_data['errors'])
    if 'data' not in response_data:
        raise PushServerError(
            'Invalid server response',
            response,
            response_data=response_data)
    # JSON parsed fine but the HTTP status may still signal failure.
    response.raise_for_status()
    # The server must return exactly one receipt per message sent.
    if len(push_messages) != len(response_data['data']):
        raise PushServerError(
            ('Mismatched response length. Expected %d %s but only '
             'received %d' % (
                 len(push_messages),
                 'receipt' if len(push_messages) == 1 else 'receipts',
                 len(response_data['data']))),
            response,
            response_data=response_data)
    # Receipts are positional: data[i] belongs to push_messages[i].
    receipts = []
    for i, receipt in enumerate(response_data['data']):
        receipts.append(PushResponse(
            push_message=push_messages[i],
            status=receipt.get('status', PushResponse.ERROR_STATUS),
            message=receipt.get('message', ''),
            details=receipt.get('details', None)))
    return receipts
def get_dict_from_buffer(buf, keys=['DISTNAME', 'MAJOR', 'MINOR',
                                    'PATCHLEVEL', 'PYTHON',
                                    'MIN_PYTHON_MAJOR', 'MIN_PYTHON_MINOR',
                                    'MIN_NUMPY_MAJOR', 'MIN_NUMPY_MINOR']):
    """Parses a string buffer for key-val pairs for the supplied keys.

    Returns a dict mapping each key to the list of values found for it in
    *buf*.  Passing ``keys=None`` matches any key-like token instead of a
    fixed set (and skips validation of matched keys).

    NOTE(review): the regex template is referenced as ``r.format(...)``
    but ``r`` is never defined in this function or visible nearby — the
    raw-string pattern literal appears to have been lost in extraction,
    so calling this as-is raises NameError.  Restore the original pattern
    template before use.

    NOTE(review): the mutable default for *keys* is never mutated (only
    rebound), so it is safe here, though keys=None plus a sentinel would
    be more conventional.
    """
    pairs = dict()
    if keys is None:
        # Wildcard mode: any non-whitespace token counts as a key.
        keys = "\S+"
        regex = re.compile(r.format(keys), re.VERBOSE)
        validate = False
    else:
        keys = [k.strip() for k in keys]
        regex = re.compile(r.format('|'.join(keys)), re.VERBOSE)
        validate = True
        # Pre-seed so every requested key appears even with no matches.
        for k in keys:
            pairs[k] = []
    matches = regex.findall(buf)
    for match in matches:
        # Split only on the first '=' so values may themselves contain '='.
        key, val = match.split('=', 1)
        key = (strip_line(key, ':')).strip()
        val = (strip_line(val)).strip()
        if validate and key not in keys:
            msg = "regex produced incorrect match. regex pattern = {0} "\
                  "claims key = [{1}] while original set of search keys "\
                  "= {2}".format(regex.pattern, key, '|'.join(keys))
            raise AssertionError(msg)
        pairs.setdefault(key, []).append(val)
    return pairs
def replace_first_key_in_makefile(buf, key, replacement, outfile=None):
    """Replace the first line in *buf* matching *key* with *replacement*.

    Optionally writes the new buffer into *outfile*.  Returns the new
    buffer.

    Raises ValueError when *key* is not found in *buf*.

    NOTE(review): the regex template is referenced as ``r.format(key)``
    but ``r`` is undefined in this view — the raw-string pattern literal
    appears to have been lost in extraction and must be restored before
    this function can run.
    """
    regexp = re.compile(r.format(key), re.VERBOSE)
    matches = regexp.findall(buf)
    # BUGFIX: re.findall() returns a (possibly empty) list, never None,
    # so the previous ``matches is None`` check could never trigger and a
    # missing key went undetected.
    if not matches:
        msg = "Could not find key = {0} in the provided buffer. "\
              "Pattern used = {1}".format(key, regexp.pattern)
        raise ValueError(msg)
    newbuf = regexp.sub(replacement, buf, count=1)
    if outfile is not None:
        write_text_file(outfile, newbuf)
    return newbuf
def settings_to_cmd_args(settings_dict):
    """Build the ``mysql`` command-line argument list from a Django
    DATABASES settings dict.

    Extracted from Django 1.8's MySQL DatabaseClient.runshell so the
    command line construction is callable on its own.  Values in
    OPTIONS take precedence over the top-level settings.
    """
    options = settings_dict['OPTIONS']
    db = options.get('db', settings_dict['NAME'])
    user = options.get('user', settings_dict['USER'])
    passwd = options.get('passwd', settings_dict['PASSWORD'])
    host = options.get('host', settings_dict['HOST'])
    port = options.get('port', settings_dict['PORT'])
    cert = options.get('ssl', {}).get('ca')
    defaults_file = options.get('read_default_file')

    args = ['mysql']
    if defaults_file:
        args.append("--defaults-file=%s" % defaults_file)
    if user:
        args.append("--user=%s" % user)
    if passwd:
        args.append("--password=%s" % passwd)
    if host:
        # A path-like host selects a unix socket instead of TCP.
        flag = "--socket=%s" if '/' in host else "--host=%s"
        args.append(flag % host)
    if port:
        args.append("--port=%s" % port)
    if cert:
        args.append("--ssl-ca=%s" % cert)
    if db:
        args.append(db)
    return args
def modify_sql(sql, add_comments, add_hints, add_index_hints):
    """Rewrite the start of *sql*.

    Injects each string in *add_comments* as an individual SQL comment
    after the first keyword, and applies the named SELECT hints from
    *add_hints* — within each mutually exclusive hint group the last hint
    given wins.  For SELECTs, index hints are folded into the remainder
    via modify_sql_index_hints().  Unrecognized statements are returned
    unchanged.
    """
    match = query_start_re.match(sql)
    if not match:
        return sql

    keyword = match.group('keyword')
    pieces = [keyword]

    existing_comments = match.group('comments').strip()
    if existing_comments:
        pieces.append(existing_comments)
    pieces.extend('/*{}*/'.format(comment) for comment in add_comments)

    is_select = (keyword == "SELECT")
    if is_select:
        for group_name, hint_set in SELECT_HINTS.items():
            wanted = [hint for hint in add_hints if hint in hint_set]
            if wanted:
                # Mutually exclusive group: keep only the last requested.
                pieces.append(wanted[-1])
            else:
                # No new hint for this group - keep any pre-existing one.
                existing = match.group(group_name)
                if existing is not None:
                    pieces.append(existing.rstrip())

    remainder = sql[match.end():]
    if is_select and add_index_hints:
        for index_hint in add_index_hints:
            remainder = modify_sql_index_hints(remainder, *index_hint)
    pieces.append(remainder)

    return ' '.join(pieces)
def validate_key(self, key):
    """Reject cache keys longer than the 250-character maximum.

    Django normally only warns about over-long keys; this backend raises
    a ValueError instead, then defers to the parent class's validation.
    """
    if len(key) > 250:
        raise ValueError(
            # Fixed typo in the user-facing message: "maxmimum" -> "maximum".
            "Cache key is longer than the maximum 250 characters: {}".format(
                key),
        )
    return super(MySQLCache, self).validate_key(key)
def decode(self, value, value_type):
    """Convert a stored value blob back into a Python object.

    *value_type* is a one-char code: 'i' = integer, 'p' = pickle,
    'z' = zlib-compressed pickle.  Raises ValueError for any other code.
    """
    if value_type == 'i':
        return int(value)
    if value_type == 'z':
        # Decompress, then treat the result as a plain pickle.
        value, value_type = zlib.decompress(value), 'p'
    if value_type != 'p':
        raise ValueError(
            "Unknown value_type '{}' read from the cache table.".format(
                value_type),
        )
    return pickle.loads(force_bytes(value))
21,491 | def _is_simple_query ( cls , query ) : return ( not query . low_mark and not query . high_mark and not query . select and not query . group_by and not query . distinct and not query . order_by and len ( query . alias_map ) <= 1 ) | Inspect the internals of the Query and say if we think its WHERE clause can be used in a HANDLER statement |
def animate(self, seq_name):
    """Generator yielding the frames of the animation sequence *seq_name*,
    one frame per request.

    Integer frame entries index into ``self``; any other entry names a
    sub-sequence whose frames are played inline.  When the sequence has a
    ``next`` attribute playback chains on to that sequence, otherwise it
    stops after a single pass.
    """
    while True:
        anim = getattr(self.animations, seq_name)
        step = anim.speed if hasattr(anim, "speed") else 1
        total = len(anim.frames)
        position = 0
        while position < total:
            entry = anim.frames[int(position)]
            position += step
            if isinstance(entry, int):
                yield self[entry]
            else:
                # Entry names another sequence: splice its frames in.
                for nested in self.animate(entry):
                    yield nested
        if not hasattr(anim, "next"):
            break
        seq_name = anim.next
def effective_FPS(self):
    """Calculate the effective frames-per-second achieved so far.

    This should largely correlate with the FPS requested in the
    constructor, but no guarantee is given.  A missing start time is
    treated as the epoch of the monotonic clock.
    """
    if self.start_time is None:
        self.start_time = 0
    duration = monotonic() - self.start_time
    return self.called / duration
21,494 | def _crop_box ( self , size ) : ( left , top ) = self . offset right = left + min ( size [ 0 ] , self . width ) bottom = top + min ( size [ 1 ] , self . height ) return ( left , top , right , bottom ) | Helper that calculates the crop box for the offset within the image . |
def refresh(self):
    """Clears the composition and renders all the images, taking into
    account their position and offset.
    """
    self._clear()
    for img in self.composed_images:
        self._background_image.paste(img.image(self._device.size), img.position)
    # NOTE(review): Image.crop() returns a NEW image and leaves the
    # original untouched, so this call's result is discarded and the line
    # is effectively a no-op — was an assignment back to
    # self._background_image intended?  Confirm before changing.
    self._background_image.crop(box=self._device.bounding_box)
def _clear(self):
    """Paint the entire background image black."""
    surface = ImageDraw.Draw(self._background_image)
    surface.rectangle(self._device.bounding_box, fill="black")
    del surface
def inflate_bbox(self):
    """Realign the left and right edges of the bounding box so that both
    are inflated outwards to multiples of 4, and return the new box.
    """
    left, top, right, bottom = self.bounding_box
    aligned_left = left & 0xFFFC
    if right % 4 == 0:
        aligned_right = right
    else:
        # Round the right edge up to the next multiple of 4.
        aligned_right = (right & 0xFFFC) + 0x04
    self.bounding_box = (aligned_left, top, aligned_right, bottom)
    return self.bounding_box
def println(self, text=""):
    """Prints the supplied text to the device, scrolling where necessary.
    The text is always followed by a newline.

    With word-wrap enabled, the text is wrapped against the clean
    (ANSI-stripped) characters while the original directive stream is
    replayed in step, so colour changes land on the right characters.
    """
    if self.word_wrap:
        # directives is an ordered list of (method, args) pairs covering
        # both printable characters and non-printing control directives.
        directives = ansi_color.find_directives(text, self)
        clean_text = ansi_color.strip_ansi_codes(text)
        clean_lines = self.tw.wrap(clean_text)
        index = 0
        for line in clean_lines:
            line_length = len(line)
            y = 0
            while y < line_length:
                method, args = directives[index]
                # Only putch consumes a visible character, so only it
                # advances the column counter; other directives execute
                # without moving the cursor.
                if method == self.putch:
                    y += 1
                method(*args)
                index += 1
            self.newline()
    else:
        self.puts(text)
        self.newline()
def newline(self):
    """Advance the cursor to the left-hand side of the next line.

    If the cursor is already on the lowest line, the displayed contents
    are scrolled up by one character row and the top line is lost.
    """
    self.carriage_return()
    if self._cy + (2 * self._ch) >= self._device.height:
        # Scroll: shift everything up one text row, then blank the strip
        # freed up at the bottom.
        shifted = self._backing_image.crop(
            (0, self._ch, self._device.width, self._device.height))
        self._backing_image.paste(shifted, (0, 0))
        self._canvas.rectangle(
            (0, shifted.height, self._device.width, self._device.height),
            fill=self.default_bgcolor)
    else:
        self._cy += self._ch
    self.flush()
    if self.animate:
        time.sleep(0.2)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.