idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
226,200
def write ( filename , rate , data ) :
    """
    Write a numpy array as a WAV (RIFF) file.

    :param filename: output file path, or any object with a ``write`` method
    :param rate: sample rate, in samples per second
    :param data: numpy array of samples; 1-D (mono) or 2-D (frames x channels)
    :raises ValueError: if the array dtype is not signed int, float, or uint8
    """
    if hasattr ( filename , 'write' ) :
        fid = filename
    else :
        fid = open ( filename , 'wb' )
    try :
        dkind = data . dtype . kind
        # only signed ints, floats, and 8-bit unsigned are valid WAV sample types
        if not ( dkind == 'i' or dkind == 'f' or ( dkind == 'u' and data . dtype . itemsize == 1 ) ) :
            raise ValueError ( "Unsupported data type '%s'" % data . dtype )
        fid . write ( b'RIFF' )
        # placeholder for the RIFF chunk size, patched at the end
        fid . write ( b'\x00\x00\x00\x00' )
        fid . write ( b'WAVE' )
        # fmt chunk
        fid . write ( b'fmt ' )
        if dkind == 'f' :
            comp = 3  # IEEE float PCM
        else :
            comp = 1  # integer PCM
        if data . ndim == 1 :
            noc = 1
        else :
            noc = data . shape [ 1 ]
        bits = data . dtype . itemsize * 8
        sbytes = rate * ( bits // 8 ) * noc  # average byte rate
        ba = noc * ( bits // 8 )  # block align (bytes per sample frame)
        fid . write ( struct . pack ( '<ihHIIHH' , 16 , comp , noc , rate , sbytes , ba , bits ) )
        # data chunk
        fid . write ( b'data' )
        fid . write ( struct . pack ( '<i' , data . nbytes ) )
        # WAV sample data is little-endian; byteswap big-endian arrays
        if data . dtype . byteorder == '>' or ( data . dtype . byteorder == '=' and sys . byteorder == 'big' ) :
            data = data . byteswap ( )
        _array_tofile ( fid , data )
        # Determine file size and place it in correct
        # position at start of the file.
        size = fid . tell ( )
        fid . seek ( 4 )
        fid . write ( struct . pack ( '<i' , size - 8 ) )
    finally :
        if not hasattr ( filename , 'write' ) :
            fid . close ( )
        else :
            fid . seek ( 0 )
Write a numpy array as a WAV file
423
10
226,201
def safe_print(msg):
    """
    Print the given Unicode string to stdout, replacing any characters
    that are not printable in the current stdout encoding.
    """
    try:
        print(msg)
        return
    except UnicodeEncodeError:
        pass
    try:
        # NOTE encoding and decoding so that in Python 3 no b"..." is printed
        shell_encoding = sys.stdout.encoding
        sanitized = msg.encode(shell_encoding, "replace").decode(shell_encoding, "replace")
        print(sanitized)
    except (UnicodeDecodeError, UnicodeEncodeError):
        for error_line in (
            u"[ERRO] An unexpected error happened while printing to stdout.",
            u"[ERRO] Please check that your file/string encoding matches the shell encoding.",
            u"[ERRO] If possible, set your shell encoding to UTF-8 and convert any files with legacy encodings.",
        ):
            print(error_line)
Safely print a given Unicode string to stdout possibly replacing characters non - printable in the current stdout encoding .
159
24
226,202
def print_error(msg, color=True):
    """Print an error message, colorized on POSIX terminals when requested."""
    if color and is_posix():
        text = u"%s[ERRO] %s%s" % (ANSI_ERROR, msg, ANSI_END)
    else:
        text = u"[ERRO] %s" % (msg)
    safe_print(text)
Print an error message .
73
5
226,203
def print_success(msg, color=True):
    """Print a success message, colorized on POSIX terminals when requested."""
    if color and is_posix():
        text = u"%s[INFO] %s%s" % (ANSI_OK, msg, ANSI_END)
    else:
        text = u"[INFO] %s" % (msg)
    safe_print(text)
Print a success message .
71
5
226,204
def print_warning(msg, color=True):
    """Print a warning message, colorized on POSIX terminals when requested."""
    if color and is_posix():
        text = u"%s[WARN] %s%s" % (ANSI_WARNING, msg, ANSI_END)
    else:
        text = u"[WARN] %s" % (msg)
    safe_print(text)
Print a warning message .
71
5
226,205
def file_extension(path):
    """
    Return the extension of the file at ``path`` without the leading dot,
    or ``None`` if ``path`` is ``None``.
    """
    if path is None:
        return None
    dotted = os.path.splitext(os.path.basename(path))[1]
    return dotted[1:] if dotted.startswith(".") else dotted
Return the file extension .
59
5
226,206
def mimetype_from_path(path):
    """
    Return the mimetype mapped to the (lowercased) file extension of
    ``path``, or ``None`` if no mapping exists.
    """
    ext = file_extension(path)
    if ext is None:
        return None
    ext = ext.lower()
    if ext not in gc.MIMETYPE_MAP:
        return None
    return gc.MIMETYPE_MAP[ext]
Return a mimetype from the file extension .
64
10
226,207
def file_name_without_extension(path):
    """Return the base name of ``path`` with its extension stripped, or ``None``."""
    if path is None:
        return None
    base = os.path.basename(path)
    stem, _ = os.path.splitext(base)
    return stem
Return the file name without extension .
42
7
226,208
def safe_float(string, default=None):
    """
    Safely parse ``string`` into a float.

    :param string: the value to parse (string, number, or None)
    :param default: the value returned when parsing fails
    :rtype: float (or the type of ``default`` on failure)
    """
    try:
        return float(string)
    except (TypeError, ValueError):
        # TypeError: string is None or not number-like;
        # ValueError: string does not represent a float.
        # Merged from two duplicate except branches in the original.
        return default
Safely parse a string into a float .
35
9
226,209
def safe_int(string, default=None):
    """
    Safely parse ``string`` into an int (via a float parse),
    returning ``default`` converted to int when parsing fails.
    """
    parsed = safe_float(string, default)
    if parsed is None:
        return None
    return int(parsed)
Safely parse a string into an int .
36
9
226,210
def safe_get(dictionary, key, default_value, can_return_none=True):
    """
    Perform ``dictionary[key]`` safely, returning ``default_value`` when the
    key is missing, the dictionary is ``None``, or the stored value is
    ``None`` and ``can_return_none`` is ``False``.
    """
    try:
        found = dictionary[key]
    except (KeyError, TypeError):
        # KeyError if key is not present in dictionary
        # TypeError if dictionary is None
        return default_value
    if found is None and not can_return_none:
        return default_value
    return found
Safely perform a dictionary get returning the default value if the key is not found .
95
17
226,211
def norm_join(prefix, suffix):
    """
    Join ``prefix`` and ``suffix`` and return the normalized resulting path;
    a missing side is ignored, and two missing sides yield ``"."``.
    """
    if prefix is None and suffix is None:
        return "."
    if prefix is None or suffix is None:
        only = prefix if suffix is None else suffix
        return os.path.normpath(only)
    return os.path.normpath(os.path.join(prefix, suffix))
Join prefix and suffix paths and return the resulting path normalized .
75
12
226,212
def copytree(source_directory, destination_directory, ignore=None):
    """
    Recursively copy the contents of ``source_directory`` into
    ``destination_directory``, creating the destination if needed.

    :param ignore: optional callable ``ignore(dir, names)`` returning the
                   collection of entry names to skip (shutil.copytree-style)
    """
    if not os.path.isdir(source_directory):
        # leaf case: plain file copy
        shutil.copyfile(source_directory, destination_directory)
        return
    if not os.path.isdir(destination_directory):
        os.makedirs(destination_directory)
    entries = os.listdir(source_directory)
    skipped = ignore(source_directory, entries) if ignore is not None else set()
    for entry in entries:
        if entry in skipped:
            continue
        copytree(
            os.path.join(source_directory, entry),
            os.path.join(destination_directory, entry),
            ignore,
        )
Recursively copy the contents of a source directory into a destination directory . Both directories must exist .
146
20
226,213
def ensure_parent_directory(path, ensure_parent=True):
    """
    Ensure that the parent directory of ``path`` exists, creating it if needed.

    :param ensure_parent: if ``False``, ensure ``path`` itself rather than its parent
    :raises OSError: if the directory cannot be created
    """
    target = os.path.abspath(path)
    if ensure_parent:
        target = os.path.dirname(target)
    if os.path.exists(target):
        return
    try:
        os.makedirs(target)
    except (IOError, OSError):
        raise OSError(u"Directory '%s' cannot be created" % target)
Ensures the parent directory exists .
107
8
226,214
def can_run_c_extension(name=None):
    """
    Determine whether the given Python C extension loads correctly.

    :param name: one of ``"cdtw"``, ``"cmfcc"``, ``"cew"``, ``"cfw"``,
                 or ``None`` to check the default set (cdtw, cmfcc, cew)
    :rtype: bool
    """
    def check(extension):
        # importing the compiled module is the availability probe
        try:
            if extension == "cdtw":
                # Python C extension for computing DTW
                import aeneas.cdtw.cdtw
            elif extension == "cmfcc":
                # Python C extension for computing MFCC
                import aeneas.cmfcc.cmfcc
            elif extension == "cew":
                # Python C extension for synthesizing with eSpeak
                import aeneas.cew.cew
            elif extension == "cfw":
                # Python C extension for synthesizing with Festival
                import aeneas.cfw.cfw
            return True
        except ImportError:
            return False
    if name in ("cdtw", "cmfcc", "cew", "cfw"):
        return check(name)
    # NOTE cfw is still experimental!
    return check("cdtw") and check("cmfcc") and check("cew")
Determine whether the given Python C extension loads correctly .
289
12
226,215
def run_c_extension_with_fallback ( log_function , extension , c_function , py_function , args , rconf ) :
    """
    Run a function calling a C extension, falling back to a pure Python
    function if the former does not succeed.

    :param log_function: logging callable, accepting a string or a [format, arg] list
    :param extension: name of the C extension (e.g. ``"cdtw"``)
    :param c_function: the C-extension-backed callable, or ``None``
    :param py_function: the pure Python callable, or ``None``
    :param args: tuple of positional arguments passed to either callable
    :param rconf: runtime-configuration mapping controlling extension usage
    :raises RuntimeError: if both the C extension and the pure Python code fail
    """
    computed = False
    # decide whether the C extension may be used at all
    if not rconf [ u"c_extensions" ] :
        log_function ( u"C extensions disabled" )
    elif extension not in rconf :
        log_function ( [ u"C extension '%s' not recognized" , extension ] )
    elif not rconf [ extension ] :
        log_function ( [ u"C extension '%s' disabled" , extension ] )
    else :
        log_function ( [ u"C extension '%s' enabled" , extension ] )
        if c_function is None :
            log_function ( u"C function is None" )
        elif can_run_c_extension ( extension ) :
            log_function ( [ u"C extension '%s' enabled and it can be loaded" , extension ] )
            computed , result = c_function ( * args )
        else :
            log_function ( [ u"C extension '%s' enabled but it cannot be loaded" , extension ] )
    # fall back to the pure Python implementation if the C path did not run/succeed
    if not computed :
        if py_function is None :
            log_function ( u"Python function is None" )
        else :
            log_function ( u"Running the pure Python code" )
            computed , result = py_function ( * args )
    if not computed :
        raise RuntimeError ( u"Both the C extension and the pure Python code failed. (Wrong arguments? Input too big?)" )
    return result
Run a function calling a C extension falling back to a pure Python function if the former does not succeed .
315
21
226,216
def file_can_be_read(path):
    """Return ``True`` if the file at ``path`` can be opened for reading."""
    if path is None:
        return False
    try:
        probe = io.open(path, "rb")
    except (IOError, OSError):
        return False
    probe.close()
    return True
Return True if the file at the given path can be read .
54
13
226,217
def file_can_be_written(path):
    """
    Return ``True`` if a file can be created at ``path``.

    NOTE: this probe actually creates (then deletes) the file,
    so it has a visible side effect on the filesystem.
    """
    if path is None:
        return False
    try:
        with io.open(path, "wb") as probe:
            pass
        # remove the probe file we just created
        delete_file(None, path)
        return True
    except (IOError, OSError):
        return False
Return True if a file can be written at the given path .
62
13
226,218
def read_file_bytes(input_file_path):
    """
    Read the file at the given file path and return its contents as a
    byte string, or ``None`` if an error occurred.
    """
    try:
        with io.open(input_file_path, "rb") as input_file:
            return input_file.read()
    except Exception:
        # e.g. the path does not exist, is unreadable, or is None.
        # Narrowed from a bare ``except`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return None
Read the file at the given file path and return its contents as a byte string or None if an error occurred .
53
23
226,219
def human_readable_number(number, suffix=""):
    """
    Format ``number`` into a human-readable string using binary (1024)
    unit prefixes, e.g. ``1536 -> "1.5K"``.
    """
    prefixes = ("", "K", "M", "G", "T", "P", "E", "Z")
    value = number
    index = 0
    # scale down until the value fits under 1024 or prefixes run out
    while abs(value) >= 1024.0 and index < len(prefixes) - 1:
        value /= 1024.0
        index += 1
    if abs(value) < 1024.0:
        return "%3.1f%s%s" % (value, prefixes[index], suffix)
    value /= 1024.0
    return "%.1f%s%s" % (value, "Y", suffix)
Format the given number into a human - readable string .
104
11
226,220
def safe_unichr ( codepoint ) :
    """
    Safely return a Unicode string of length one containing the Unicode
    character with the given codepoint.

    :param int codepoint: the Unicode codepoint
    :rtype: Unicode string
    """
    if is_py2_narrow_build ( ) :
        # Python 2 narrow builds cannot unichr() above 0xFFFF;
        # go through a unicode-escape decode instead
        return ( "\\U%08x" % codepoint ) . decode ( "unicode-escape" )
    elif PY2 :
        return unichr ( codepoint )
    return chr ( codepoint )
Safely return a Unicode string of length one containing the Unicode character with given codepoint .
72
19
226,221
def safe_unicode_stdin(string):
    """
    Safely convert the given string to a Unicode string,
    decoding using ``sys.stdin.encoding`` if needed.

    :param string: byte string or Unicode string (or ``None``)
    """
    if string is None:
        return None
    if is_bytes(string):
        if FROZEN:
            return string.decode("utf-8")
        try:
            return string.decode(sys.stdin.encoding)
        except UnicodeDecodeError:
            return string.decode(sys.stdin.encoding, "replace")
        except Exception:
            # e.g. sys.stdin.encoding is None (redirected stdin).
            # Narrowed from a bare ``except`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return string.decode("utf-8")
    return string
Safely convert the given string to a Unicode string decoding using sys . stdin . encoding if needed .
95
21
226,222
def get(self, fragment_info):
    """
    Return the cached value associated with ``fragment_info``.

    :raises KeyError: if ``fragment_info`` is not cached
    """
    if self.is_cached(fragment_info):
        return self.cache[fragment_info]
    raise KeyError(u"Attempt to get text not cached")
Get the value associated with the given key .
47
9
226,223
def clear(self):
    """Clear the cache, removing all the cached files from disk."""
    self.log(u"Clearing cache...")
    for entry in self.cache.values():
        handler, info = entry
        self.log([u" Removing file '%s'", info])
        gf.delete_file(handler, info)
    # reset the in-memory cache structures
    self._initialize_cache()
    self.log(u"Clearing cache... done")
Clear the cache and remove all the files from disk .
91
11
226,224
def _language_to_voice_code(self, language):
    """
    Translate ``language`` into a TTS voice code, honoring an override
    in the runtime configuration when present; unknown languages fall
    back to using the language code itself as the voice code.
    """
    override = self.rconf[RuntimeConfiguration.TTS_VOICE_CODE]
    if override is not None:
        self.log(u"TTS voice override in rconf")
        voice_code = override
    else:
        try:
            voice_code = self.LANGUAGE_TO_VOICE_CODE[language]
        except KeyError as exc:
            self.log_exc(u"Language code '%s' not found in LANGUAGE_TO_VOICE_CODE" % (language), exc, False, None)
            self.log_warn(u"Using the language code as the voice code")
            voice_code = language
    self.log([u"Language to voice code: '%s' => '%s'", language, voice_code])
    return voice_code
Translate a language value to a voice code .
186
10
226,225
def clear_cache(self):
    """Clear the TTS cache, removing all cache files from disk (no-op when caching is disabled)."""
    if not self.use_cache:
        return
    self.log(u"Requested to clear TTS cache")
    self.cache.clear()
Clear the TTS cache removing all cache files from disk .
37
12
226,226
def set_subprocess_arguments(self, subprocess_arguments):
    """
    Store the list of arguments that the wrapper will pass to subprocess.

    NOTE this is a method because we might need to access self.rconf,
    so we cannot specify the list of arguments as a class field.
    """
    self.subprocess_arguments = subprocess_arguments
    self.log([u"Subprocess arguments: %s", subprocess_arguments])
Set the list of arguments that the wrapper will pass to subprocess .
83
14
226,227
def synthesize_multiple ( self , text_file , output_file_path , quit_after = None , backwards = False ) :
    """
    Synthesize the text contained in the given fragment list into a WAVE file.

    :param text_file: the text file whose fragments will be synthesized
    :param output_file_path: path of the WAVE file to create
    :param quit_after: if not ``None``, stop synthesizing after reaching this time value
    :param backwards: if ``True``, synthesize the fragments in reverse order
    :raises TypeError: if ``text_file`` is ``None`` or contains a non-Unicode line
    :raises ValueError: if ``text_file`` has no (non-empty) fragments or an unsupported language
    :raises OSError: if ``output_file_path`` cannot be written
    """
    # input sanity checks
    if text_file is None :
        self . log_exc ( u"text_file is None" , None , True , TypeError )
    if len ( text_file ) < 1 :
        self . log_exc ( u"The text file has no fragments" , None , True , ValueError )
    if text_file . chars == 0 :
        self . log_exc ( u"All fragments in the text file are empty" , None , True , ValueError )
    if not self . rconf [ RuntimeConfiguration . ALLOW_UNLISTED_LANGUAGES ] :
        for fragment in text_file . fragments :
            if fragment . language not in self . LANGUAGE_TO_VOICE_CODE :
                self . log_exc ( u"Language '%s' is not supported by the selected TTS engine" % ( fragment . language ) , None , True , ValueError )
    for fragment in text_file . fragments :
        for line in fragment . lines :
            if not gf . is_unicode ( line ) :
                self . log_exc ( u"The text file contain a line which is not a Unicode string" , None , True , TypeError )
    # log parameters
    if quit_after is not None :
        self . log ( [ u"Quit after reaching %.3f" , quit_after ] )
    if backwards :
        self . log ( u"Synthesizing backwards" )
    # check that output_file_path can be written
    if not gf . file_can_be_written ( output_file_path ) :
        self . log_exc ( u"Cannot write to output file '%s'" % ( output_file_path ) , None , True , OSError )
    # first, call Python function _synthesize_multiple_python() if available
    if self . HAS_PYTHON_CALL :
        self . log ( u"Calling TTS engine via Python" )
        try :
            computed , result = self . _synthesize_multiple_python ( text_file , output_file_path , quit_after , backwards )
            if computed :
                self . log ( u"The _synthesize_multiple_python call was successful, returning anchors" )
                return result
            else :
                self . log ( u"The _synthesize_multiple_python call failed" )
        except Exception as exc :
            self . log_exc ( u"An unexpected error occurred while calling _synthesize_multiple_python" , exc , False , None )
    # call _synthesize_multiple_c_extension() or _synthesize_multiple_subprocess()
    self . log ( u"Calling TTS engine via C extension or subprocess" )
    c_extension_function = self . _synthesize_multiple_c_extension if self . HAS_C_EXTENSION_CALL else None
    subprocess_function = self . _synthesize_multiple_subprocess if self . HAS_SUBPROCESS_CALL else None
    return gf . run_c_extension_with_fallback (
        self . log ,
        self . C_EXTENSION_NAME ,
        c_extension_function ,
        subprocess_function ,
        ( text_file , output_file_path , quit_after , backwards ) ,
        rconf = self . rconf
    )
Synthesize the text contained in the given fragment list into a WAVE file .
736
18
226,228
def _synthesize_multiple_python ( self , text_file , output_file_path , quit_after = None , backwards = False ) : self . log ( u"Synthesizing multiple via a Python call..." ) ret = self . _synthesize_multiple_generic ( helper_function = self . _synthesize_single_python_helper , text_file = text_file , output_file_path = output_file_path , quit_after = quit_after , backwards = backwards ) self . log ( u"Synthesizing multiple via a Python call... done" ) return ret
Synthesize multiple fragments via a Python call .
137
11
226,229
def _synthesize_multiple_subprocess ( self , text_file , output_file_path , quit_after = None , backwards = False ) : self . log ( u"Synthesizing multiple via subprocess..." ) ret = self . _synthesize_multiple_generic ( helper_function = self . _synthesize_single_subprocess_helper , text_file = text_file , output_file_path = output_file_path , quit_after = quit_after , backwards = backwards ) self . log ( u"Synthesizing multiple via subprocess... done" ) return ret
Synthesize multiple fragments via subprocess .
137
10
226,230
def _read_audio_data ( self , file_path ) :
    """
    Read audio data from the file at ``file_path``.

    :returns: ``(True, (length, sample_rate, format, samples))`` on success,
              ``(False, None)`` on failure
    """
    try :
        self . log ( u"Reading audio data..." )
        # if we know the TTS outputs to PCM16 mono WAVE
        # with the correct sample rate,
        # we can read samples directly from it,
        # without an intermediate conversion through ffmpeg
        audio_file = AudioFile (
            file_path = file_path ,
            file_format = self . OUTPUT_AUDIO_FORMAT ,
            rconf = self . rconf ,
            logger = self . logger
        )
        audio_file . read_samples_from_file ( )
        self . log ( [ u"Duration of '%s': %f" , file_path , audio_file . audio_length ] )
        self . log ( u"Reading audio data... done" )
        return ( True , (
            audio_file . audio_length ,
            audio_file . audio_sample_rate ,
            audio_file . audio_format ,
            audio_file . audio_samples
        ) )
    except ( AudioFileUnsupportedFormatError , OSError ) as exc :
        self . log_exc ( u"An unexpected error occurred while reading audio data" , exc , True , None )
        return ( False , None )
Read audio data from file .
256
6
226,231
def _loop_no_cache ( self , helper_function , num , fragment ) : self . log ( [ u"Examining fragment %d (no cache)..." , num ] ) # synthesize and get the duration of the output file voice_code = self . _language_to_voice_code ( fragment . language ) self . log ( u"Calling helper function" ) succeeded , data = helper_function ( text = fragment . filtered_text , voice_code = voice_code , output_file_path = None , return_audio_data = True ) # check output if not succeeded : self . log_crit ( u"An unexpected error occurred in helper_function" ) return ( False , None ) self . log ( [ u"Examining fragment %d (no cache)... done" , num ] ) return ( True , data )
Synthesize all fragments without using the cache
182
10
226,232
def _loop_use_cache ( self , helper_function , num , fragment ) :
    """
    Synthesize fragment ``num``, reading from (or populating) the TTS cache.

    :returns: ``(True, data)`` on success, ``(False, None)`` on failure
    """
    self . log ( [ u"Examining fragment %d (cache)..." , num ] )
    # the cache key is the (language, filtered text) pair
    fragment_info = ( fragment . language , fragment . filtered_text )
    if self . cache . is_cached ( fragment_info ) :
        self . log ( u"Fragment cached: retrieving audio data from cache" )
        # read data from file, whose path is in the cache
        file_handler , file_path = self . cache . get ( fragment_info )
        self . log ( [ u"Reading cached fragment at '%s'..." , file_path ] )
        succeeded , data = self . _read_audio_data ( file_path )
        if not succeeded :
            self . log_crit ( u"An unexpected error occurred while reading cached audio file" )
            return ( False , None )
        self . log ( [ u"Reading cached fragment at '%s'... done" , file_path ] )
    else :
        self . log ( u"Fragment not cached: synthesizing and caching" )
        # creating destination file
        file_info = gf . tmp_file ( suffix = u".cache.wav" , root = self . rconf [ RuntimeConfiguration . TMP_PATH ] )
        file_handler , file_path = file_info
        self . log ( [ u"Synthesizing fragment to '%s'..." , file_path ] )
        # synthesize and get the duration of the output file
        voice_code = self . _language_to_voice_code ( fragment . language )
        self . log ( u"Calling helper function" )
        succeeded , data = helper_function (
            text = fragment . filtered_text ,
            voice_code = voice_code ,
            output_file_path = file_path ,
            return_audio_data = True
        )
        # check output
        if not succeeded :
            self . log_crit ( u"An unexpected error occurred in helper_function" )
            return ( False , None )
        self . log ( [ u"Synthesizing fragment to '%s'... done" , file_path ] )
        duration , sr_nu , enc_nu , samples = data
        # only cache fragments that actually produced audio
        if duration > 0 :
            self . log ( u"Fragment has > 0 duration, adding it to cache" )
            self . cache . add ( fragment_info , file_info )
            self . log ( u"Added fragment to cache" )
        else :
            self . log ( u"Fragment has zero duration, not adding it to cache" )
        # NOTE(review): close placed inside the not-cached branch, matching
        # upstream aeneas; the flattened source is ambiguous here — confirm
        self . log ( [ u"Closing file handler for cached output file path '%s'" , file_path ] )
        gf . close_file_handler ( file_handler )
    self . log ( [ u"Examining fragment %d (cache)... done" , num ] )
    return ( True , data )
Synthesize all fragments using the cache
594
9
226,233
def adjust ( self , aba_parameters , boundary_indices , real_wave_mfcc , text_file , allow_arbitrary_shift = False ) :
    """
    Adjust the boundaries of the text map using the algorithm and parameters
    specified in the constructor, storing the sync map fragment list internally.

    :param aba_parameters: dict with ``"nozero"``, ``"nonspeech"``, and ``"algorithm"`` entries
    :param boundary_indices: array of boundary frame indices
    :param real_wave_mfcc: the audio MFCC object (AudioFileMFCC)
    :param text_file: the text file (TextFile)
    :param allow_arbitrary_shift: allow shifts smaller than the MFCC window shift
    :raises TypeError: if ``boundary_indices`` is ``None``, or the other inputs
                       are not of the expected types
    :returns: the adjusted sync map fragment list
    """
    self . log ( u"Called adjust" )
    # input sanity checks
    if boundary_indices is None :
        self . log_exc ( u"boundary_indices is None" , None , True , TypeError )
    if not isinstance ( real_wave_mfcc , AudioFileMFCC ) :
        self . log_exc ( u"real_wave_mfcc is not an AudioFileMFCC object" , None , True , TypeError )
    if not isinstance ( text_file , TextFile ) :
        self . log_exc ( u"text_file is not a TextFile object" , None , True , TypeError )
    # unpack the adjustment parameters
    nozero = aba_parameters [ "nozero" ]
    ns_min , ns_string = aba_parameters [ "nonspeech" ]
    algorithm , algo_parameters = aba_parameters [ "algorithm" ]
    self . log ( u" Converting boundary indices to fragment list..." )
    # convert frame indices to time values (mws = MFCC window shift)
    begin = real_wave_mfcc . middle_begin * real_wave_mfcc . rconf . mws
    end = real_wave_mfcc . middle_end * real_wave_mfcc . rconf . mws
    time_values = [ begin ] + list ( boundary_indices * self . mws ) + [ end ]
    self . intervals_to_fragment_list ( text_file = text_file , time_values = time_values )
    self . log ( u" Converting boundary indices to fragment list... done" )
    self . log ( u" Processing fragments with zero length..." )
    self . _process_zero_length ( nozero , allow_arbitrary_shift )
    self . log ( u" Processing fragments with zero length... done" )
    self . log ( u" Processing nonspeech fragments..." )
    self . _process_long_nonspeech ( ns_min , ns_string , real_wave_mfcc )
    self . log ( u" Processing nonspeech fragments... done" )
    self . log ( u" Adjusting..." )
    # dispatch to the algorithm-specific adjustment method
    ALGORITHM_MAP = {
        self . AFTERCURRENT : self . _adjust_aftercurrent ,
        self . AUTO : self . _adjust_auto ,
        self . BEFORENEXT : self . _adjust_beforenext ,
        self . OFFSET : self . _adjust_offset ,
        self . PERCENT : self . _adjust_percent ,
        self . RATE : self . _adjust_rate ,
        self . RATEAGGRESSIVE : self . _adjust_rate_aggressive ,
    }
    ALGORITHM_MAP [ algorithm ] ( real_wave_mfcc , algo_parameters )
    self . log ( u" Adjusting... done" )
    self . log ( u" Smoothing..." )
    self . _smooth_fragment_list ( real_wave_mfcc . audio_length , ns_string )
    self . log ( u" Smoothing... done" )
    return self . smflist
Adjust the boundaries of the text map using the algorithm and parameters specified in the constructor storing the sync map fragment list internally .
679
24
226,234
def append_fragment_list_to_sync_root(self, sync_root):
    """
    Append the current sync map fragment list as children of ``sync_root``.

    :raises TypeError: if ``sync_root`` is not a Tree object
    """
    if not isinstance(sync_root, Tree):
        self.log_exc(u"sync_root is not a Tree object", None, True, TypeError)
    self.log(u"Appending fragment list to sync root...")
    children = [Tree(value=fragment) for fragment in self.smflist]
    for child in children:
        sync_root.add_child(child)
    self.log(u"Appending fragment list to sync root... done")
Append the sync map fragment list to the given node from a sync map tree .
115
17
226,235
def _process_zero_length ( self , nozero , allow_arbitrary_shift ) :
    """
    If ``nozero`` is ``True``, modify the sync map fragment list so that
    no fragment will have zero length.
    """
    self . log ( u"Called _process_zero_length" )
    if not nozero :
        self . log ( u"Processing zero length intervals not requested: returning" )
        return
    self . log ( u"Processing zero length intervals requested" )
    self . log ( u" Checking and fixing..." )
    duration = self . rconf [ RuntimeConfiguration . ABA_NO_ZERO_DURATION ]
    self . log ( [ u" Requested no zero duration: %.3f" , duration ] )
    if not allow_arbitrary_shift :
        self . log ( u" No arbitrary shift => taking max with mws" )
        # round up to the next multiple of the MFCC window shift
        duration = self . rconf . mws . geq_multiple ( duration )
    self . log ( [ u" Actual no zero duration: %.3f" , duration ] )
    # ignore HEAD and TAIL
    max_index = len ( self . smflist ) - 1
    self . smflist . fix_zero_length_fragments ( duration = duration , min_index = 1 , max_index = max_index )
    self . log ( u" Checking and fixing... done" )
    if self . smflist . has_zero_length_fragments ( 1 , max_index ) :
        self . log_warn ( u" The fragment list still has fragments with zero length" )
    else :
        self . log ( u" The fragment list does not have fragments with zero length" )
If nozero is True modify the sync map fragment list so that no fragment will have zero length .
325
20
226,236
def main():
    """Entry point for the aeneas-cli hydra script compiled by pyinstaller."""
    invoke = "aeneas-cli" if FROZEN else "pyinstaller-aeneas-cli.py"
    HydraCLI(invoke=invoke).run(arguments=sys.argv, show_help=False)
This is the aeneas - cli hydra script to be compiled by pyinstaller .
79
19
226,237
def _compute_runs ( self , array ) : if len ( array ) < 1 : return [ ] return numpy . split ( array , numpy . where ( numpy . diff ( array ) != 1 ) [ 0 ] + 1 )
Compute runs as a list of arrays each containing the indices of a contiguous run .
52
17
226,238
def _rolling_window ( self , array , size ) : shape = array . shape [ : - 1 ] + ( array . shape [ - 1 ] - size + 1 , size ) strides = array . strides + ( array . strides [ - 1 ] , ) return numpy . lib . stride_tricks . as_strided ( array , shape = shape , strides = strides )
Compute rolling windows of width size of the given array .
81
12
226,239
def usage(self):
    """Return the samtools usage information for this command."""
    retval, stderr, stdout = _pysam_dispatch(
        self.collection, self.dispatch, is_usage=True, catch_stdout=True)
    # some tools write usage to stderr, such as mpileup
    return stderr if stderr else stdout
return the samtools usage information for this command
74
9
226,240
def iterate(infile):
    """
    Iterate over a samtools "pileup -c" formatted file, yielding
    PileupIndel records for indel lines (reference column "*") and
    PileupSubstitution records otherwise.
    """
    # per-column converters; the position column becomes 0-based
    conv_subst = (str, lambda x: int(x) - 1, str, str, int, int, int, int, str, str)
    conv_indel = (str, lambda x: int(x) - 1, str, str, int, int, int, int, str, str, int, int, int)
    for line in infile:
        fields = line[:-1].split()
        is_indel = fields[2] == "*"
        converters = conv_indel if is_indel else conv_subst
        factory = PileupIndel if is_indel else PileupSubstitution
        try:
            yield factory(*[convert(value) for convert, value in zip(converters, fields)])
        except TypeError:
            raise pysam.SamtoolsError("parsing error in line: `%s`" % line)
iterate over samtools pileup - c formatted file .
233
12
226,241
def vcf2pileup ( vcf , sample ) :
    """
    Convert a vcf record to a pileup record (PileupIndel or
    PileupSubstitution) for the given sample.

    :returns: a pileup record, or ``None`` if the position is not a variant
    :raises ValueError: if the record has more than one genotype for the sample
    """
    chromosome = vcf . contig
    pos = vcf . pos
    reference = vcf . ref
    allelles = [ reference ] + vcf . alt
    data = vcf [ sample ]
    # get genotype
    genotypes = data [ "GT" ]
    if len ( genotypes ) > 1 :
        raise ValueError ( "only single genotype per position, %s" % ( str ( vcf ) ) )
    genotypes = genotypes [ 0 ]
    # not a variant
    if genotypes [ 0 ] == "." :
        return None
    # map the allele indices in the genotype string to allele sequences
    genotypes = [ allelles [ int ( x ) ] for x in genotypes if x != "/" ]
    # snp_quality is "genotype quality"
    snp_quality = consensus_quality = data . get ( "GQ" , [ 0 ] ) [ 0 ]
    mapping_quality = vcf . info . get ( "MQ" , [ 0 ] ) [ 0 ]
    coverage = data . get ( "DP" , 0 )
    if len ( reference ) > 1 or max ( [ len ( x ) for x in vcf . alt ] ) > 1 :
        # indel
        genotype , offset = translateIndelGenotypeFromVCF ( genotypes , reference )
        return PileupIndel ( chromosome , pos + offset , "*" , genotype , consensus_quality , snp_quality , mapping_quality , coverage , genotype , "<" * len ( genotype ) , 0 , 0 , 0 )
    else :
        genotype = encodeGenotype ( "" . join ( genotypes ) )
        read_bases = ""
        base_qualities = ""
        return PileupSubstitution ( chromosome , pos , reference , genotype , consensus_quality , snp_quality , mapping_quality , coverage , read_bases , base_qualities )
convert vcf record to pileup record .
389
10
226,242
def iterate_from_vcf(infile, sample):
    """
    Iterate over a vcf-formatted file, yielding pileup records
    (substitutions or indels) for the given sample.

    :raises KeyError: if ``sample`` is not present in the VCF file
    """
    vcf = pysam.VCF()
    vcf.connect(infile)
    if sample not in vcf.getsamples():
        # BUG FIX: the original raised KeyError("sample %s not vcf file")
        # without ever interpolating the sample name
        raise KeyError("sample %s not in vcf file" % sample)
    for row in vcf.fetch():
        result = vcf2pileup(row, sample)
        if result:
            yield result
iterate over a vcf - formatted file .
88
10
226,243
def _update_pysam_files ( cf , destdir ) :
    """
    Update pysam C source files, applying redirection of output: each file
    in ``cf`` is rewritten as ``<file>.pysam.c`` with ``main``/``stdout``/
    ``stderr``/``printf``-family symbols renamed to per-tool wrappers, and
    the ``pysam.h``/``pysam.c`` templates are instantiated into ``destdir``.
    """
    basename = os . path . basename ( destdir )
    for filename in cf :
        if not filename :
            continue
        dest = filename + ".pysam.c"
        with open ( filename , encoding = "utf-8" ) as infile :
            lines = "" . join ( infile . readlines ( ) )
        with open ( dest , "w" , encoding = "utf-8" ) as outfile :
            outfile . write ( '#include "{}.pysam.h"\n\n' . format ( basename ) )
            subname , _ = os . path . splitext ( os . path . basename ( filename ) )
            # rename main() so multiple tools can be linked into one module
            if subname in MAIN . get ( basename , [ ] ) :
                lines = re . sub ( r"int main\(" , "int {}_main(" . format ( basename ) , lines )
            else :
                lines = re . sub ( r"int main\(" , "int {}_{}_main(" . format ( basename , subname ) , lines )
            # redirect the standard streams and output calls to per-tool wrappers
            lines = re . sub ( "stderr" , "{}_stderr" . format ( basename ) , lines )
            lines = re . sub ( "stdout" , "{}_stdout" . format ( basename ) , lines )
            lines = re . sub ( r" printf\(" , " fprintf({}_stdout, " . format ( basename ) , lines )
            lines = re . sub ( r"([^kf])puts\(" , r"\1{}_puts(" . format ( basename ) , lines )
            lines = re . sub ( r"putchar\(([^)]+)\)" , r"fputc(\1, {}_stdout)" . format ( basename ) , lines )
            fn = os . path . basename ( filename )
            # some specific fixes:
            SPECIFIC_SUBSTITUTIONS = {
                "bam_md.c" : (
                    'sam_open_format("-", mode_w' ,
                    'sam_open_format({}_stdout_fn, mode_w' . format ( basename ) ) ,
                "phase.c" : (
                    'putc("ACGT"[f->seq[j] == 1? (c&3, {}_stdout) : (c>>16&3)]);' . format ( basename ) ,
                    'putc("ACGT"[f->seq[j] == 1? (c&3) : (c>>16&3)], {}_stdout);' . format ( basename ) ) ,
                "cut_target.c" : (
                    'putc(33 + (cns[j]>>8>>2, {}_stdout));' . format ( basename ) ,
                    'putc(33 + (cns[j]>>8>>2), {}_stdout);' . format ( basename ) )
            }
            if fn in SPECIFIC_SUBSTITUTIONS :
                lines = lines . replace (
                    SPECIFIC_SUBSTITUTIONS [ fn ] [ 0 ] ,
                    SPECIFIC_SUBSTITUTIONS [ fn ] [ 1 ] )
            outfile . write ( lines )
    # instantiate the pysam.h / pysam.c templates for this tool collection
    with open ( os . path . join ( "import" , "pysam.h" ) ) as inf , open ( os . path . join ( destdir , "{}.pysam.h" . format ( basename ) ) , "w" ) as outf :
        outf . write ( re . sub ( "@pysam@" , basename , inf . read ( ) ) )
    with open ( os . path . join ( "import" , "pysam.c" ) ) as inf , open ( os . path . join ( destdir , "{}.pysam.c" . format ( basename ) ) , "w" ) as outf :
        outf . write ( re . sub ( "@pysam@" , basename , inf . read ( ) ) )
update pysam files applying redirection of output
866
12
226,244
def get_include():
    """
    Return a list of include directories for building against pysam.

    Header files may be stored in different relative locations depending
    on the installation mode (e.g., `python setup.py install`,
    `python setup.py develop`); the first existing candidate from each
    list wins, and the first entry of each list is the develop-mode one.
    """
    here = os.path.abspath(os.path.join(os.path.dirname(__file__)))
    candidate_sets = (
        [os.path.join(here, '..', 'htslib'), os.path.join(here, 'include', 'htslib')],
        [os.path.join(here, '..', 'samtools'), os.path.join(here, 'include', 'samtools')],
    )
    includes = [here]
    for candidates in candidate_sets:
        existing = next((c for c in candidates if os.path.exists(c)), None)
        if existing is not None:
            includes.append(os.path.abspath(existing))
    return includes
return a list of include directories .
255
7
226,245
def get_libraries():
    """
    Return a list of shared-library paths to link against.

    Note that this list does not include libcsamtools.so, as there are
    numerous name conflicts with libchtslib.so.
    """
    here = os.path.abspath(os.path.join(os.path.dirname(__file__)))
    names = ['libctabixproxies', 'libcfaidx', 'libcsamfile', 'libcvcf', 'libcbcf', 'libctabix']
    if pysam.config.HTSLIB == "builtin":
        names.append('libchtslib')
    # shared-object suffix for this platform/interpreter
    suffix = sysconfig.get_config_var('SO')
    return [os.path.join(here, name + suffix) for name in names]
return a list of libraries to link against .
189
9
226,246
def has_edge(self, edge):
    """Return whether ``edge`` (a ``(u, v)`` pair) exists in the graph."""
    endpoint_a, endpoint_b = edge
    return (endpoint_a, endpoint_b) in self.edge_properties
Return whether an edge exists .
27
6
226,247
def dump_package_data ( data , buf , format_ = FileFormat . py , skip_attributes = None ) :
    """
    Write package data to ``buf`` in the given file format.

    :param data: mapping of package attributes
    :param buf: writable buffer receiving the serialized output
    :param format_: a FileFormat value (``txt`` is not supported)
    :param skip_attributes: optional iterable of attribute names to omit
    :raises ValueError: if ``format_`` is ``FileFormat.txt``
    """
    if format_ == FileFormat . txt :
        raise ValueError ( "'txt' format not supported for packages." )
    # drop None-valued entries, then validate against the package schema
    # NOTE: .iteritems() — this module targets Python 2
    data_ = dict ( ( k , v ) for k , v in data . iteritems ( ) if v is not None )
    data_ = package_serialise_schema . validate ( data_ )
    skip = set ( skip_attributes or [ ] )
    items = [ ]
    # well-known keys first, in canonical order
    for key in package_key_order :
        if key not in skip :
            value = data_ . pop ( key , None )
            if value is not None :
                items . append ( ( key , value ) )
    # remaining are arbitrary keys
    for key , value in data_ . iteritems ( ) :
        if key not in skip :
            items . append ( ( key , value ) )
    dump_func = dump_functions [ format_ ]
    dump_func ( items , buf )
Write package data to buf .
206
6
226,248
def set_authors ( data ) : if "authors" in data : return shfile = os . path . join ( os . path . dirname ( __file__ ) , "get_committers.sh" ) p = subprocess . Popen ( [ "bash" , shfile ] , stdout = subprocess . PIPE ) out , _ = p . communicate ( ) if p . returncode : return authors = out . strip ( ) . split ( '\n' ) authors = [ x . strip ( ) for x in authors ] data [ "authors" ] = authors
Add authors attribute based on repo contributions
125
7
226,249
def make_package ( name , path , make_base = None , make_root = None , skip_existing = True , warn_on_skip = True ) : maker = PackageMaker ( name ) yield maker # post-with-block: # package = maker . get_package ( ) cwd = os . getcwd ( ) src_variants = [ ] # skip those variants that already exist if skip_existing : for variant in package . iter_variants ( ) : variant_ = variant . install ( path , dry_run = True ) if variant_ is None : src_variants . append ( variant ) else : maker . skipped_variants . append ( variant_ ) if warn_on_skip : print_warning ( "Skipping installation: Package variant already " "exists: %s" % variant_ . uri ) else : src_variants = package . iter_variants ( ) with retain_cwd ( ) : # install the package variant(s) into the filesystem package repo at `path` for variant in src_variants : variant_ = variant . install ( path ) base = variant_ . base if make_base and base : if not os . path . exists ( base ) : os . makedirs ( base ) os . chdir ( base ) make_base ( variant_ , base ) root = variant_ . root if make_root and root : if not os . path . exists ( root ) : os . makedirs ( root ) os . chdir ( root ) make_root ( variant_ , root ) maker . installed_variants . append ( variant_ )
Make and install a package .
347
6
226,250
def get_package ( self ) : # get and validate package data package_data = self . _get_data ( ) package_data = package_schema . validate ( package_data ) # check compatibility with rez version if "requires_rez_version" in package_data : ver = package_data . pop ( "requires_rez_version" ) if _rez_Version < ver : raise PackageMetadataError ( "Failed reading package definition file: rez version >= %s " "needed (current version is %s)" % ( ver , _rez_Version ) ) # create a 'memory' package repository containing just this package version_str = package_data . get ( "version" ) or "_NO_VERSION" repo_data = { self . name : { version_str : package_data } } repo = create_memory_package_repository ( repo_data ) # retrieve the package from the new repository family_resource = repo . get_package_family ( self . name ) it = repo . iter_packages ( family_resource ) package_resource = it . next ( ) package = self . package_cls ( package_resource ) # revalidate the package for extra measure package . validate_data ( ) return package
Create the analogous package .
268
5
226,251
def getTokensEndLoc ( ) : import inspect fstack = inspect . stack ( ) try : # search up the stack (through intervening argument normalizers) for correct calling routine for f in fstack [ 2 : ] : if f [ 3 ] == "_parseNoCache" : endloc = f [ 0 ] . f_locals [ "loc" ] return endloc else : raise ParseFatalException ( "incorrect usage of getTokensEndLoc - may only be called from within a parse action" ) finally : del fstack
Method to be called from within a parse action to determine the end location of the parsed tokens .
113
19
226,252
def schema_keys ( schema ) : def _get_leaf ( value ) : if isinstance ( value , Schema ) : return _get_leaf ( value . _schema ) return value keys = set ( ) dict_ = schema . _schema assert isinstance ( dict_ , dict ) for key in dict_ . iterkeys ( ) : key_ = _get_leaf ( key ) if isinstance ( key_ , basestring ) : keys . add ( key_ ) return keys
Get the string values of keys in a dict - based schema .
105
13
226,253
def dict_to_schema ( schema_dict , required , allow_custom_keys = True , modifier = None ) : if modifier : modifier = Use ( modifier ) def _to ( value ) : if isinstance ( value , dict ) : d = { } for k , v in value . iteritems ( ) : if isinstance ( k , basestring ) : k = Required ( k ) if required else Optional ( k ) d [ k ] = _to ( v ) if allow_custom_keys : d [ Optional ( basestring ) ] = modifier or object schema = Schema ( d ) elif modifier : schema = And ( value , modifier ) else : schema = value return schema return _to ( schema_dict )
Convert a dict of Schemas into a Schema .
156
13
226,254
def enter_diff_mode ( self , context_model = None ) : assert not self . diff_mode self . diff_mode = True if context_model is None : self . diff_from_source = True self . diff_context_model = self . context_model . copy ( ) else : self . diff_from_source = False self . diff_context_model = context_model self . clear ( ) self . setColumnCount ( 5 ) self . refresh ( )
Enter diff mode .
103
4
226,255
def leave_diff_mode ( self ) : assert self . diff_mode self . diff_mode = False self . diff_context_model = None self . diff_from_source = False self . setColumnCount ( 2 ) self . refresh ( )
Leave diff mode .
54
4
226,256
def get_title ( self ) : def _title ( context_model ) : context = context_model . context ( ) if context is None : return "new context*" title = os . path . basename ( context . load_path ) if context . load_path else "new context" if context_model . is_modified ( ) : title += '*' return title if self . diff_mode : diff_title = _title ( self . diff_context_model ) if self . diff_from_source : diff_title += "'" return "%s %s %s" % ( _title ( self . context_model ) , self . short_double_arrow , diff_title ) else : return _title ( self . context_model )
Returns a string suitable for titling a window containing this table .
162
13
226,257
def _color_level ( str_ , level ) : fore_color , back_color , styles = _get_style_from_config ( level ) return _color ( str_ , fore_color , back_color , styles )
Return the string wrapped with the appropriate styling for the message level . The styling will be determined based on the rez configuration .
50
25
226,258
def _color ( str_ , fore_color = None , back_color = None , styles = None ) : # TODO: Colorama is documented to work on Windows and trivial test case # proves this to be the case, but it doesn't work in Rez. If the initialise # is called in sec/rez/__init__.py then it does work, however as discussed # in the following comment this is not always desirable. So until we can # work out why we forcibly turn it off. if not config . get ( "color_enabled" , False ) or platform_ . name == "windows" : return str_ # lazily init colorama. This is important - we don't want to init at startup, # because colorama prints a RESET_ALL character atexit. This in turn adds # unexpected output when capturing the output of a command run in a # ResolvedContext, for example. _init_colorama ( ) colored = "" if not styles : styles = [ ] if fore_color : colored += getattr ( colorama . Fore , fore_color . upper ( ) , '' ) if back_color : colored += getattr ( colorama . Back , back_color . upper ( ) , '' ) for style in styles : colored += getattr ( colorama . Style , style . upper ( ) , '' ) return colored + str_ + colorama . Style . RESET_ALL
Return the string wrapped with the appropriate styling escape sequences .
298
11
226,259
def late ( ) : from rez . package_resources_ import package_rex_keys def decorated ( fn ) : # this is done here rather than in standard schema validation because # the latter causes a very obfuscated error message if fn . __name__ in package_rex_keys : raise ValueError ( "Cannot use @late decorator on function '%s'" % fn . __name__ ) setattr ( fn , "_late" , True ) _add_decorator ( fn , "late" ) return fn return decorated
Used by functions in package . py that are evaluated lazily .
113
13
226,260
def include ( module_name , * module_names ) : def decorated ( fn ) : _add_decorator ( fn , "include" , nargs = [ module_name ] + list ( module_names ) ) return fn return decorated
Used by functions in package . py to have access to named modules .
52
14
226,261
def iter_package_families ( paths = None ) : for path in ( paths or config . packages_path ) : repo = package_repository_manager . get_repository ( path ) for resource in repo . iter_package_families ( ) : yield PackageFamily ( resource )
Iterate over package families in no particular order .
64
10
226,262
def iter_packages ( name , range_ = None , paths = None ) : entries = _get_families ( name , paths ) seen = set ( ) for repo , family_resource in entries : for package_resource in repo . iter_packages ( family_resource ) : key = ( package_resource . name , package_resource . version ) if key in seen : continue seen . add ( key ) if range_ : if isinstance ( range_ , basestring ) : range_ = VersionRange ( range_ ) if package_resource . version not in range_ : continue yield Package ( package_resource )
Iterate over Package instances in no particular order .
130
10
226,263
def get_package ( name , version , paths = None ) : if isinstance ( version , basestring ) : range_ = VersionRange ( "==%s" % version ) else : range_ = VersionRange . from_version ( version , "==" ) it = iter_packages ( name , range_ , paths ) try : return it . next ( ) except StopIteration : return None
Get an exact version of a package .
85
8
226,264
def get_package_from_string ( txt , paths = None ) : o = VersionedObject ( txt ) return get_package ( o . name , o . version , paths = paths )
Get a package given a string .
43
7
226,265
def get_developer_package ( path , format = None ) : from rez . developer_package import DeveloperPackage return DeveloperPackage . from_path ( path , format = format )
Create a developer package .
39
5
226,266
def create_package ( name , data , package_cls = None ) : from rez . package_maker__ import PackageMaker maker = PackageMaker ( name , data , package_cls = package_cls ) return maker . get_package ( )
Create a package given package data .
55
7
226,267
def get_last_release_time ( name , paths = None ) : entries = _get_families ( name , paths ) max_time = 0 for repo , family_resource in entries : time_ = repo . get_last_release_time ( family_resource ) if time_ == 0 : return 0 max_time = max ( max_time , time_ ) return max_time
Returns the most recent time this package was released .
84
10
226,268
def get_completions ( prefix , paths = None , family_only = False ) : op = None if prefix : if prefix [ 0 ] in ( '!' , '~' ) : if family_only : return set ( ) op = prefix [ 0 ] prefix = prefix [ 1 : ] fam = None for ch in ( '-' , '@' , '#' ) : if ch in prefix : if family_only : return set ( ) fam = prefix . split ( ch ) [ 0 ] break words = set ( ) if not fam : words = set ( x . name for x in iter_package_families ( paths = paths ) if x . name . startswith ( prefix ) ) if len ( words ) == 1 : fam = iter ( words ) . next ( ) if family_only : return words if fam : it = iter_packages ( fam , paths = paths ) words . update ( x . qualified_name for x in it if x . qualified_name . startswith ( prefix ) ) if op : words = set ( op + x for x in words ) return words
Get autocompletion options given a prefix string .
234
10
226,269
def iter_packages ( self ) : for package in self . repository . iter_packages ( self . resource ) : yield Package ( package )
Iterate over the packages within this family in no particular order .
29
13
226,270
def is_local ( self ) : local_repo = package_repository_manager . get_repository ( self . config . local_packages_path ) return ( self . resource . _repository . uid == local_repo . uid )
Returns True if the package is in the local package repository
59
11
226,271
def print_info ( self , buf = None , format_ = FileFormat . yaml , skip_attributes = None , include_release = False ) : data = self . validated_data ( ) . copy ( ) # config is a special case. We only really want to show any config settings # that were in the package.py, not the entire Config contents that get # grafted onto the Package/Variant instance. However Variant has an empy # 'data' dict property, since it forwards data from its parent package. data . pop ( "config" , None ) if self . config : if isinstance ( self , Package ) : config_dict = self . data . get ( "config" ) else : config_dict = self . parent . data . get ( "config" ) data [ "config" ] = config_dict if not include_release : skip_attributes = list ( skip_attributes or [ ] ) + list ( package_release_keys ) buf = buf or sys . stdout dump_package_data ( data , buf = buf , format_ = format_ , skip_attributes = skip_attributes )
Print the contents of the package .
244
7
226,272
def qualified_name ( self ) : o = VersionedObject . construct ( self . name , self . version ) return str ( o )
Get the qualified name of the package .
29
8
226,273
def parent ( self ) : family = self . repository . get_parent_package_family ( self . resource ) return PackageFamily ( family ) if family else None
Get the parent package family .
34
6
226,274
def iter_variants ( self ) : for variant in self . repository . iter_variants ( self . resource ) : yield Variant ( variant , context = self . context , parent = self )
Iterate over the variants within this package in index order .
41
12
226,275
def get_variant ( self , index = None ) : for variant in self . iter_variants ( ) : if variant . index == index : return variant
Get the variant with the associated index .
34
8
226,276
def qualified_name ( self ) : idxstr = '' if self . index is None else str ( self . index ) return "%s[%s]" % ( self . qualified_package_name , idxstr )
Get the qualified name of the variant .
47
8
226,277
def parent ( self ) : if self . _parent is not None : return self . _parent try : package = self . repository . get_parent_package ( self . resource ) self . _parent = Package ( package , context = self . context ) except AttributeError as e : reraise ( e , ValueError ) return self . _parent
Get the parent package .
73
5
226,278
def get_requires ( self , build_requires = False , private_build_requires = False ) : requires = self . requires or [ ] if build_requires : requires = requires + ( self . build_requires or [ ] ) if private_build_requires : requires = requires + ( self . private_build_requires or [ ] ) return requires
Get the requirements of the variant .
74
7
226,279
def install ( self , path , dry_run = False , overrides = None ) : repo = package_repository_manager . get_repository ( path ) resource = repo . install_variant ( self . resource , dry_run = dry_run , overrides = overrides ) if resource is None : return None elif resource is self . resource : return self else : return Variant ( resource )
Install this variant into another package repository .
88
8
226,280
def open_file_for_write ( filepath , mode = None ) : stream = StringIO ( ) yield stream content = stream . getvalue ( ) filepath = os . path . realpath ( filepath ) tmpdir = tmpdir_manager . mkdtemp ( ) cache_filepath = os . path . join ( tmpdir , os . path . basename ( filepath ) ) debug_print ( "Writing to %s (local cache of %s)" , cache_filepath , filepath ) with atomic_write ( filepath , overwrite = True ) as f : f . write ( content ) if mode is not None : os . chmod ( filepath , mode ) with open ( cache_filepath , 'w' ) as f : f . write ( content ) file_cache [ filepath ] = cache_filepath
Writes both to given filepath and tmpdir location .
179
12
226,281
def load_py ( stream , filepath = None ) : with add_sys_paths ( config . package_definition_build_python_paths ) : return _load_py ( stream , filepath = filepath )
Load python - formatted data from a stream .
49
9
226,282
def process_python_objects ( data , filepath = None ) : def _process ( value ) : if isinstance ( value , dict ) : for k , v in value . items ( ) : value [ k ] = _process ( v ) return value elif isfunction ( value ) : func = value if hasattr ( func , "_early" ) : # run the function now, and replace with return value # # make a copy of the func with its own globals, and add 'this' import types fn = types . FunctionType ( func . func_code , func . func_globals . copy ( ) , name = func . func_name , argdefs = func . func_defaults , closure = func . func_closure ) # apply globals fn . func_globals [ "this" ] = EarlyThis ( data ) fn . func_globals . update ( get_objects ( ) ) # execute the function spec = getargspec ( func ) args = spec . args or [ ] if len ( args ) not in ( 0 , 1 ) : raise ResourceError ( "@early decorated function must " "take zero or one args only" ) if args : # this 'data' arg support isn't needed anymore, but I'm # supporting it til I know nobody is using it... # value_ = fn ( data ) else : value_ = fn ( ) # process again in case this is a function returning a function return _process ( value_ ) elif hasattr ( func , "_late" ) : return SourceCode ( func = func , filepath = filepath , eval_as_function = True ) elif func . __name__ in package_rex_keys : # if a rex function, the code has to be eval'd NOT as a function, # otherwise the globals dict doesn't get updated with any vars # defined in the code, and that means rex code like this: # # rr = 'test' # env.RR = '{rr}' # # ..won't work. It was never intentional that the above work, but # it does, so now we have to keep it so. # return SourceCode ( func = func , filepath = filepath , eval_as_function = False ) else : # a normal function. Leave unchanged, it will be stripped after return func else : return value def _trim ( value ) : if isinstance ( value , dict ) : for k , v in value . items ( ) : if isfunction ( v ) : if v . __name__ == "preprocess" : # preprocess is a special case. 
It has to stay intact # until the `DeveloperPackage` has a chance to apply it; # after which it gets removed from the package attributes. # pass else : del value [ k ] elif ismodule ( v ) or k . startswith ( "__" ) : del value [ k ] else : value [ k ] = _trim ( v ) return value data = _process ( data ) data = _trim ( data ) return data
Replace certain values in the given package data dict .
652
11
226,283
def load_yaml ( stream , * * kwargs ) : # if there's an error parsing the yaml, and you pass yaml.load a string, # it will print lines of context, but will print "<string>" instead of a # filename; if you pass a stream, it will print the filename, but no lines # of context. # Get the best of both worlds, by passing it a string, then replacing # "<string>" with the filename if there's an error... content = stream . read ( ) try : return yaml . load ( content ) or { } except Exception , e : if stream . name and stream . name != '<string>' : for mark_name in 'context_mark' , 'problem_mark' : mark = getattr ( e , mark_name , None ) if mark is None : continue if getattr ( mark , 'name' ) == '<string>' : mark . name = stream . name raise e
Load yaml - formatted data from a stream .
206
10
226,284
def _blocked ( self , args ) : reason = args . read_shortstr ( ) if self . on_blocked : return self . on_blocked ( reason )
RabbitMQ Extension .
38
5
226,285
def _x_secure_ok ( self , response ) : args = AMQPWriter ( ) args . write_longstr ( response ) self . _send_method ( ( 10 , 21 ) , args )
Security mechanism response
45
3
226,286
def _x_start_ok ( self , client_properties , mechanism , response , locale ) : if self . server_capabilities . get ( 'consumer_cancel_notify' ) : if 'capabilities' not in client_properties : client_properties [ 'capabilities' ] = { } client_properties [ 'capabilities' ] [ 'consumer_cancel_notify' ] = True if self . server_capabilities . get ( 'connection.blocked' ) : if 'capabilities' not in client_properties : client_properties [ 'capabilities' ] = { } client_properties [ 'capabilities' ] [ 'connection.blocked' ] = True args = AMQPWriter ( ) args . write_table ( client_properties ) args . write_shortstr ( mechanism ) args . write_longstr ( response ) args . write_shortstr ( locale ) self . _send_method ( ( 10 , 11 ) , args )
Select security mechanism and locale
207
5
226,287
def _tune ( self , args ) : client_heartbeat = self . client_heartbeat or 0 self . channel_max = args . read_short ( ) or self . channel_max self . frame_max = args . read_long ( ) or self . frame_max self . method_writer . frame_max = self . frame_max self . server_heartbeat = args . read_short ( ) or 0 # negotiate the heartbeat interval to the smaller of the # specified values if self . server_heartbeat == 0 or client_heartbeat == 0 : self . heartbeat = max ( self . server_heartbeat , client_heartbeat ) else : self . heartbeat = min ( self . server_heartbeat , client_heartbeat ) # Ignore server heartbeat if client_heartbeat is disabled if not self . client_heartbeat : self . heartbeat = 0 self . _x_tune_ok ( self . channel_max , self . frame_max , self . heartbeat )
Propose connection tuning parameters
211
5
226,288
def heartbeat_tick ( self , rate = 2 ) : if not self . heartbeat : return # treat actual data exchange in either direction as a heartbeat sent_now = self . method_writer . bytes_sent recv_now = self . method_reader . bytes_recv if self . prev_sent is None or self . prev_sent != sent_now : self . last_heartbeat_sent = monotonic ( ) if self . prev_recv is None or self . prev_recv != recv_now : self . last_heartbeat_received = monotonic ( ) self . prev_sent , self . prev_recv = sent_now , recv_now # send a heartbeat if it's time to do so if monotonic ( ) > self . last_heartbeat_sent + self . heartbeat : self . send_heartbeat ( ) self . last_heartbeat_sent = monotonic ( ) # if we've missed two intervals' heartbeats, fail; this gives the # server enough time to send heartbeats a little late if ( self . last_heartbeat_received and self . last_heartbeat_received + 2 * self . heartbeat < monotonic ( ) ) : raise ConnectionForced ( 'Too many heartbeats missed' )
Send heartbeat packets if necessary and fail if none have been received recently . This should be called frequently on the order of once per second .
277
27
226,289
def _x_tune_ok ( self , channel_max , frame_max , heartbeat ) : args = AMQPWriter ( ) args . write_short ( channel_max ) args . write_long ( frame_max ) args . write_short ( heartbeat or 0 ) self . _send_method ( ( 10 , 31 ) , args ) self . _wait_tune_ok = False
Negotiate connection tuning parameters
86
6
226,290
def parent_suite ( self ) : if self . context and self . context . parent_suite_path : return Suite . load ( self . context . parent_suite_path ) return None
Get the current parent suite .
43
6
226,291
def print_info ( self , obj = None , buf = sys . stdout ) : if not obj : self . _print_info ( buf ) return True b = False for fn in ( self . _print_tool_info , self . _print_package_info , self . _print_suite_info , self . _print_context_info ) : b_ = fn ( obj , buf , b ) b |= b_ if b_ : print >> buf , '' if not b : print >> buf , "Rez does not know what '%s' is" % obj return b
Print a status message about the given object .
129
9
226,292
def print_tools ( self , pattern = None , buf = sys . stdout ) : seen = set ( ) rows = [ ] context = self . context if context : data = context . get_tools ( ) conflicts = set ( context . get_conflicting_tools ( ) . keys ( ) ) for _ , ( variant , tools ) in sorted ( data . items ( ) ) : pkg_str = variant . qualified_package_name for tool in tools : if pattern and not fnmatch ( tool , pattern ) : continue if tool in conflicts : label = "(in conflict)" color = critical else : label = '' color = None rows . append ( [ tool , '-' , pkg_str , "active context" , label , color ] ) seen . add ( tool ) for suite in self . suites : for tool , d in suite . get_tools ( ) . iteritems ( ) : if tool in seen : continue if pattern and not fnmatch ( tool , pattern ) : continue label = [ ] color = None path = which ( tool ) if path : path_ = os . path . join ( suite . tools_path , tool ) if path != path_ : label . append ( "(hidden by unknown tool '%s')" % path ) color = warning variant = d [ "variant" ] if isinstance ( variant , set ) : pkg_str = ", " . join ( variant ) label . append ( "(in conflict)" ) color = critical else : pkg_str = variant . qualified_package_name orig_tool = d [ "tool_name" ] if orig_tool == tool : orig_tool = '-' label = ' ' . join ( label ) source = ( "context '%s' in suite '%s'" % ( d [ "context_name" ] , suite . load_path ) ) rows . append ( [ tool , orig_tool , pkg_str , source , label , color ] ) seen . add ( tool ) _pr = Printer ( buf ) if not rows : _pr ( "No matching tools." ) return False headers = [ [ "TOOL" , "ALIASING" , "PACKAGE" , "SOURCE" , "" , None ] , [ "----" , "--------" , "-------" , "------" , "" , None ] ] rows = headers + sorted ( rows , key = lambda x : x [ 0 ] . lower ( ) ) print_colored_columns ( _pr , rows ) return True
Print a list of visible tools .
532
7
226,293
def from_path ( cls , path , format = None ) : name = None data = None if format is None : formats = ( FileFormat . py , FileFormat . yaml ) else : formats = ( format , ) try : mode = os . stat ( path ) . st_mode except ( IOError , OSError ) : raise PackageMetadataError ( "Path %r did not exist, or was not accessible" % path ) is_dir = stat . S_ISDIR ( mode ) for name_ in config . plugins . package_repository . filesystem . package_filenames : for format_ in formats : if is_dir : filepath = os . path . join ( path , "%s.%s" % ( name_ , format_ . extension ) ) exists = os . path . isfile ( filepath ) else : # if format was not specified, verify that it has the # right extension before trying to load if format is None : if os . path . splitext ( path ) [ 1 ] != format_ . extension : continue filepath = path exists = True if exists : data = load_from_file ( filepath , format_ , disable_memcache = True ) break if data : name = data . get ( "name" ) if name is not None or isinstance ( name , basestring ) : break if data is None : raise PackageMetadataError ( "No package definition file found at %s" % path ) if name is None or not isinstance ( name , basestring ) : raise PackageMetadataError ( "Error in %r - missing or non-string field 'name'" % filepath ) package = create_package ( name , data , package_cls = cls ) # preprocessing result = package . _get_preprocessed ( data ) if result : package , data = result package . filepath = filepath # find all includes, this is needed at install time to copy the right # py sourcefiles into the package installation package . includes = set ( ) def visit ( d ) : for k , v in d . iteritems ( ) : if isinstance ( v , SourceCode ) : package . includes |= ( v . includes or set ( ) ) elif isinstance ( v , dict ) : visit ( v ) visit ( data ) package . _validate_includes ( ) return package
Load a developer package .
505
5
226,294
def dump_all ( documents , stream = None , Dumper = Dumper , default_style = None , default_flow_style = None , canonical = None , indent = None , width = None , allow_unicode = None , line_break = None , encoding = 'utf-8' , explicit_start = None , explicit_end = None , version = None , tags = None ) : getvalue = None if stream is None : if encoding is None : from StringIO import StringIO else : from cStringIO import StringIO stream = StringIO ( ) getvalue = stream . getvalue dumper = Dumper ( stream , default_style = default_style , default_flow_style = default_flow_style , canonical = canonical , indent = indent , width = width , allow_unicode = allow_unicode , line_break = line_break , encoding = encoding , version = version , tags = tags , explicit_start = explicit_start , explicit_end = explicit_end ) try : dumper . open ( ) for data in documents : dumper . represent ( data ) dumper . close ( ) finally : dumper . dispose ( ) if getvalue : return getvalue ( )
Serialize a sequence of Python objects into a YAML stream . If stream is None return the produced string instead .
256
24
226,295
def running_instances ( self , context , process_name ) : handle = ( id ( context ) , process_name ) it = self . processes . get ( handle , { } ) . itervalues ( ) entries = [ x for x in it if x [ 0 ] . poll ( ) is None ] return entries
Get a list of running instances .
69
7
226,296
def get_public_methods ( self ) : return self . get_action_methods ( ) + [ ( 'getenv' , self . getenv ) , ( 'expandvars' , self . expandvars ) , ( 'defined' , self . defined ) , ( 'undefined' , self . undefined ) ]
return a list of methods on this class which should be exposed in the rex API .
72
18
226,297
def apply_environ ( self ) : if self . manager is None : raise RezSystemError ( "You must call 'set_manager' on a Python rex " "interpreter before using it." ) self . target_environ . update ( self . manager . environ )
Apply changes to target environ .
61
7
226,298
def formatted ( self , func ) : other = EscapedString . __new__ ( EscapedString ) other . strings = [ ] for is_literal , value in self . strings : if not is_literal : value = func ( value ) other . strings . append ( ( is_literal , value ) ) return other
Return the string with non - literal parts formatted .
70
10
226,299
def execute_code ( self , code , filename = None , isolate = False ) : def _apply ( ) : self . compile_code ( code = code , filename = filename , exec_namespace = self . globals ) # we want to execute the code using self.globals - if for no other # reason that self.formatter is pointing at self.globals, so if we # passed in a copy, we would also need to make self.formatter "look" at # the same copy - but we don't want to "pollute" our namespace, because # the same executor may be used to run multiple packages. Therefore, # we save a copy of self.globals before execution, and restore it after # if isolate : saved_globals = dict ( self . globals ) try : _apply ( ) finally : self . globals . clear ( ) self . globals . update ( saved_globals ) else : _apply ( )
Execute code within the execution context .
209
8