idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
32,700
def _create_buffers ( self ) : self . buffers = { } for step in self . graph . nodes ( ) : num_buffers = 1 if isinstance ( step , Reduction ) : num_buffers = len ( step . parents ) self . buffers [ step ] = Buffer ( step . min_frames , step . left_context , step . right_context , num_buffers ) return self . buffers
Create a buffer for every step in the pipeline .
32,701
def save ( self , writer = None ) : if self . path is None : raise ValueError ( 'No path given to save the data set.' ) self . save_at ( self . path , writer )
If self . path is defined it tries to save the corpus at the given path .
32,702
def save_at ( self , path , writer = None ) : if writer is None : from . import io writer = io . DefaultWriter ( ) elif type ( writer ) == str : from . import io writer = io . create_writer_of_type ( writer ) writer . save ( self , path ) self . path = path
Save this corpus at the given path . If the path differs from the current path set the path gets updated .
32,703
def new_file ( self , path , track_idx , copy_file = False ) : new_file_idx = track_idx new_file_path = os . path . abspath ( path ) if new_file_idx in self . _tracks . keys ( ) : new_file_idx = naming . index_name_if_in_list ( new_file_idx , self . _tracks . keys ( ) ) if copy_file : if not os . path . isdir ( self . ...
Adds a new audio file to the corpus with the given data .
32,704
def new_utterance ( self , utterance_idx , track_idx , issuer_idx = None , start = 0 , end = float ( 'inf' ) ) : new_utt_idx = utterance_idx if track_idx not in self . _tracks . keys ( ) : raise ValueError ( 'Track with id {} does not exist!' . format ( track_idx ) ) issuer = None if issuer_idx is not None : if issuer_...
Add a new utterance to the corpus with the given data .
32,705
def new_issuer ( self , issuer_idx , info = None ) : new_issuer_idx = issuer_idx if new_issuer_idx in self . _issuers . keys ( ) : new_issuer_idx = naming . index_name_if_in_list ( new_issuer_idx , self . _issuers . keys ( ) ) new_issuer = issuers . Issuer ( new_issuer_idx , info = info ) self . _issuers [ new_issuer_i...
Add a new issuer to the dataset with the given data .
32,706
def new_feature_container ( self , idx , path = None ) : new_feature_idx = idx new_feature_path = path if new_feature_idx in self . _feature_containers . keys ( ) : new_feature_idx = naming . index_name_if_in_list ( new_feature_idx , self . _feature_containers . keys ( ) ) if new_feature_path is None : if not os . path...
Add a new feature container with the given data .
32,707
def import_subview ( self , idx , subview ) : subview . corpus = self self . _subviews [ idx ] = subview
Add the given subview to the corpus .
32,708
def relocate_audio_to_single_container ( self , target_path ) : cont = containers . AudioContainer ( target_path ) cont . open ( ) new_tracks = { } for track in self . tracks . values ( ) : sr = track . sampling_rate samples = track . read_samples ( ) cont . set ( track . idx , samples , sr ) new_track = tracks . Conta...
Copies every track to a single container . Afterwards all tracks in the container are linked against this single container .
32,709
def from_corpus ( cls , corpus ) : ds = Corpus ( ) tracks = copy . deepcopy ( list ( corpus . tracks . values ( ) ) ) track_mapping = ds . import_tracks ( tracks ) issuers = copy . deepcopy ( list ( corpus . issuers . values ( ) ) ) issuer_mapping = ds . import_issuers ( issuers ) utterances = copy . deepcopy ( list ( ...
Create a new modifiable corpus from any other CorpusView . This for example can be used to create a independent modifiable corpus from a subview .
32,710
def merge_corpora ( cls , corpora ) : ds = Corpus ( ) for merging_corpus in corpora : ds . merge_corpus ( merging_corpus ) return ds
Merge a list of corpora into one .
32,711
def load_subset ( corpus , path , subset_idx ) : csv_file = os . path . join ( path , '{}.tsv' . format ( subset_idx ) ) subset_utt_ids = [ ] entries = textfile . read_separated_lines_generator ( csv_file , separator = '\t' , max_columns = 8 , ignore_lines_starting_with = [ 'client_id' ] , keep_empty = True ) for entry...
Load subset into corpus .
32,712
def map_age ( age ) : if age in [ None , '' ] : return issuers . AgeGroup . UNKNOWN elif age == 'teens' : return issuers . AgeGroup . YOUTH elif age in [ 'sixties' , 'seventies' , 'eighties' , 'nineties' ] : return issuers . AgeGroup . SENIOR else : return issuers . AgeGroup . ADULT
Map age to correct age - group .
32,713
def map_gender ( gender ) : if gender == 'male' : return issuers . Gender . MALE elif gender == 'female' : return issuers . Gender . FEMALE else : return issuers . Gender . UNKNOWN
Map gender to correct value .
32,714
def num_frames ( self , num_samples ) : return math . ceil ( float ( max ( num_samples - self . frame_size , 0 ) ) / float ( self . hop_size ) ) + 1
Return the number of frames that will be used for a signal with the length of num_samples .
32,715
def frame_to_seconds ( self , frame_index , sr ) : start_sample , end_sample = self . frame_to_sample ( frame_index ) return sample_to_seconds ( start_sample , sampling_rate = sr ) , sample_to_seconds ( end_sample , sampling_rate = sr )
Return a tuple containing the start and end of the frame in seconds .
32,716
def time_range_to_frame_range ( self , start , end , sr ) : start_sample = seconds_to_sample ( start , sr ) end_sample = seconds_to_sample ( end , sr ) return self . sample_to_frame_range ( start_sample ) [ 0 ] , self . sample_to_frame_range ( end_sample - 1 ) [ 1 ]
Calculate the frames containing samples from the given time range in seconds .
32,717
def _process_corpus ( self , corpus , output_path , processing_func , frame_size = 400 , hop_size = 160 , sr = None ) : feat_container = containers . FeatureContainer ( output_path ) feat_container . open ( ) sampling_rate = - 1 for utterance in corpus . utterances . values ( ) : utt_sampling_rate = utterance . samplin...
Utility function for processing a corpus with a separate processing function .
32,718
def process_buffer ( buffer , n_channels ) : samples = np . concatenate ( buffer ) if n_channels > 1 : samples = samples . reshape ( ( - 1 , n_channels ) ) . T samples = librosa . to_mono ( samples ) return samples
Merge the read blocks and resample if necessary .
32,719
def read_blocks ( file_path , start = 0.0 , end = float ( 'inf' ) , buffer_size = 5760000 ) : buffer = [ ] n_buffer = 0 n_samples = 0 with audioread . audio_open ( file_path ) as input_file : n_channels = input_file . channels sr_native = input_file . samplerate start_sample = int ( np . round ( sr_native * start ) ) *...
Read an audio file block after block . The blocks are yielded one by one .
32,720
def read_frames ( file_path , frame_size , hop_size , start = 0.0 , end = float ( 'inf' ) , buffer_size = 5760000 ) : rest_samples = np . array ( [ ] , dtype = np . float32 ) for block in read_blocks ( file_path , start = start , end = end , buffer_size = buffer_size ) : block = np . concatenate ( [ rest_samples , bloc...
Read an audio file frame by frame . The frames are yielded one after another .
32,721
def write_wav ( path , samples , sr = 16000 ) : max_value = np . abs ( np . iinfo ( np . int16 ) . min ) data = ( samples * max_value ) . astype ( np . int16 ) scipy . io . wavfile . write ( path , sr , data )
Write to given samples to a wav file . The samples are expected to be floating point numbers in the range of - 1 . 0 to 1 . 0 .
32,722
def to_dict ( self ) : return { 'mean' : self . mean , 'var' : self . var , 'min' : self . min , 'max' : self . max , 'num' : self . num }
Return the stats as a dictionary .
32,723
def concatenate ( cls , list_of_stats ) : all_stats = np . stack ( [ stats . values for stats in list_of_stats ] ) all_counts = all_stats [ : , 4 ] all_counts_relative = all_counts / np . sum ( all_counts ) min_value = float ( np . min ( all_stats [ : , 2 ] ) ) max_value = float ( np . max ( all_stats [ : , 3 ] ) ) mea...
Take a list of stats from different sets of data points and merge the stats for getting stats overall data points .
32,724
def generate_corpus ( n_issuers , n_tracks_per_issuer , n_utts_per_track , n_ll_per_utt , n_label_per_ll , rand = None ) : corpus = audiomate . Corpus ( ) for issuer in generate_issuers ( n_issuers , rand ) : corpus . import_issuers ( issuer ) n_tracks = rand . randint ( * n_tracks_per_issuer ) tracks = generate_tracks...
Generate a corpus with mock data .
32,725
def _create_subviews ( path , corpus ) : test_list_path = os . path . join ( path , 'testing_list.txt' ) dev_list_path = os . path . join ( path , 'validation_list.txt' ) test_list = textfile . read_separated_lines ( test_list_path , separator = '/' , max_columns = 2 ) dev_list = textfile . read_separated_lines ( dev_l...
Load the subviews based on testing_list . txt and validation_list . txt
32,726
def write_label_list ( path , label_list ) : entries = [ ] for label in label_list : entries . append ( [ label . start , label . end , label . value ] ) textfile . write_separated_lines ( path , entries , separator = '\t' )
Writes the given label_list to an audacity label file .
32,727
def read_label_file ( path ) : labels = [ ] for record in textfile . read_separated_lines_generator ( path , separator = '\t' , max_columns = 3 ) : value = '' if len ( record ) > 2 : value = str ( record [ 2 ] ) labels . append ( [ float ( _clean_time ( record [ 0 ] ) ) , float ( _clean_time ( record [ 1 ] ) ) , value ...
Read the labels from an audacity label file .
32,728
def extract_zip ( zip_path , target_folder ) : with zipfile . ZipFile ( zip_path ) as archive : archive . extractall ( target_folder )
Extract the content of the zip - file at zip_path into target_folder .
32,729
def extract_tar ( tar_path , target_folder ) : with tarfile . open ( tar_path , 'r' ) as archive : archive . extractall ( target_folder )
Extract the content of the tar - file at tar_path into target_folder .
32,730
def read_separated_lines ( path , separator = ' ' , max_columns = - 1 , keep_empty = False ) : gen = read_separated_lines_generator ( path , separator , max_columns , keep_empty = keep_empty ) return list ( gen )
Reads a text file where each line represents a record with some separated columns .
32,731
def read_separated_lines_with_first_key ( path : str , separator : str = ' ' , max_columns : int = - 1 , keep_empty : bool = False ) : gen = read_separated_lines_generator ( path , separator , max_columns , keep_empty = keep_empty ) dic = { } for record in gen : if len ( record ) > 0 : dic [ record [ 0 ] ] = record [ 1...
Reads the separated lines of a file and return a dictionary with the first column as keys value is a list with the rest of the columns .
32,732
def write_separated_lines ( path , values , separator = ' ' , sort_by_column = 0 ) : f = open ( path , 'w' , encoding = 'utf-8' ) if type ( values ) is dict : if sort_by_column in [ 0 , 1 ] : items = sorted ( values . items ( ) , key = lambda t : t [ sort_by_column ] ) else : items = values . items ( ) for key , value ...
Writes list or dict to file line by line . Dict can have list as value then they written separated on the line .
32,733
def read_separated_lines_generator ( path , separator = ' ' , max_columns = - 1 , ignore_lines_starting_with = [ ] , keep_empty = False ) : if not os . path . isfile ( path ) : print ( 'File doesnt exist or is no file: {}' . format ( path ) ) return f = open ( path , 'r' , errors = 'ignore' , encoding = 'utf-8' ) if ma...
Creates a generator through all lines of a file and returns the splitted line .
32,734
def update ( self , uid : str , patch = False , data : any = { } ) -> typing . Union [ str , dict ] : methodType = 'put' if patch is False else 'patch' return self . __client . request ( methodType , '/forms/%s' % uid , data = data )
Updates an existing form . Defaults to put . put will return the modified form as a dict object . patch will return a str based on success of change OK on success otherwise an error message .
32,735
def configure ( self , app ) : config = Config ( ) prefix = PREFIX . format ( self . name . upper ( ) ) backend_key = '{0}BACKEND' . format ( prefix ) self . backend_name = app . config . get ( backend_key , app . config [ 'FS_BACKEND' ] ) self . backend_prefix = BACKEND_PREFIX . format ( self . backend_name . upper ( ...
Load configuration from application configuration .
32,736
def base_url ( self ) : config_value = self . config . get ( 'url' ) if config_value : return self . _clean_url ( config_value ) default_url = current_app . config . get ( 'FS_URL' ) default_url = current_app . config . get ( '{0}URL' . format ( self . backend_prefix ) , default_url ) if default_url : url = urljoin ( d...
The public URL for this storage
32,737
def url ( self , filename , external = False ) : if filename . startswith ( '/' ) : filename = filename [ 1 : ] if self . has_url : return self . base_url + filename else : return url_for ( 'fs.get_file' , fs = self . name , filename = filename , _external = external )
This function gets the URL a file uploaded to this set would be accessed at . It doesn t check whether said file exists .
32,738
def path ( self , filename ) : if not self . backend . root : raise OperationNotSupported ( 'Direct file access is not supported by ' + self . backend . __class__ . __name__ ) return os . path . join ( self . backend . root , filename )
This returns the absolute path of a file uploaded to this set . It doesn t actually check whether said file exists .
32,739
def read ( self , filename ) : if not self . backend . exists ( filename ) : raise FileNotFound ( filename ) return self . backend . read ( filename )
Read a file content .
32,740
def open ( self , filename , mode = 'r' , ** kwargs ) : if 'r' in mode and not self . backend . exists ( filename ) : raise FileNotFound ( filename ) return self . backend . open ( filename , mode , ** kwargs )
Open the file and return a file - like object .
32,741
def write ( self , filename , content , overwrite = False ) : if not self . overwrite and not overwrite and self . backend . exists ( filename ) : raise FileExists ( ) return self . backend . write ( filename , content )
Write content to a file .
32,742
def metadata ( self , filename ) : metadata = self . backend . metadata ( filename ) metadata [ 'filename' ] = os . path . basename ( filename ) metadata [ 'url' ] = self . url ( filename , external = True ) return metadata
Get some metadata for a given file .
32,743
def serve ( self , filename ) : if not self . exists ( filename ) : abort ( 404 ) return self . backend . serve ( filename )
Serve a file given its filename
32,744
def make_thumbnail ( file , size , bbox = None ) : image = Image . open ( file ) if bbox : thumbnail = crop_thumbnail ( image , size , bbox ) else : thumbnail = center_thumbnail ( image , size ) return _img_to_file ( thumbnail )
Generate a thumbnail for a given image file .
32,745
def pip ( filename ) : requirements = [ ] for line in open ( join ( ROOT , 'requirements' , filename ) ) : line = line . strip ( ) if not line or '://' in line : continue match = RE_REQUIREMENT . match ( line ) if match : requirements . extend ( pip ( match . group ( 'filename' ) ) ) else : requirements . append ( line...
Parse pip reqs file and transform it to setuptools requirements .
32,746
def move ( self , filename , target ) : self . copy ( filename , target ) self . delete ( filename )
Move a file given its filename to another path in the storage
32,747
def save ( self , file_or_wfs , filename , overwrite = False ) : self . write ( filename , file_or_wfs . read ( ) ) return filename
Save a file - like object or a werkzeug . FileStorage with the specified filename .
32,748
def metadata ( self , filename ) : meta = self . get_metadata ( filename ) meta [ 'mime' ] = meta . get ( 'mime' ) or files . mime ( filename , self . DEFAULT_MIME ) return meta
Fetch all available metadata for a given file
32,749
def as_binary ( self , content , encoding = 'utf8' ) : if hasattr ( content , 'read' ) : return content . read ( ) elif isinstance ( content , six . text_type ) : return content . encode ( encoding ) else : return content
Perform content encoding for binary write
32,750
def get_metadata ( self , filename ) : obj = self . bucket . Object ( filename ) checksum = 'md5:{0}' . format ( obj . e_tag [ 1 : - 1 ] ) mime = obj . content_type . split ( ';' , 1 ) [ 0 ] if obj . content_type else None return { 'checksum' : checksum , 'size' : obj . content_length , 'mime' : mime , 'modified' : obj...
Fetch all availabe metadata
32,751
def thumbnail ( self , size ) : if size in self . thumbnail_sizes : return self . thumbnails . get ( str ( size ) ) else : raise ValueError ( 'Unregistered thumbnail size {0}' . format ( size ) )
Get the thumbnail filename for a given size
32,752
def full ( self , external = False ) : return self . fs . url ( self . filename , external = external ) if self . filename else None
Get the full image URL in respect with max_size
32,753
def best_url ( self , size = None , external = False ) : if not self . thumbnail_sizes : return self . url elif not size : self . thumbnail_sizes . sort ( ) best_size = self . thumbnail_sizes [ - 1 ] else : self . thumbnail_sizes . sort ( ) index = bisect . bisect_left ( self . thumbnail_sizes , size ) if index >= len ...
Provide the best thumbnail for downscaling .
32,754
def rerender ( self ) : with self . fs . open ( self . original , 'rb' ) as f_img : img = io . BytesIO ( f_img . read ( ) ) self . save ( img , filename = self . filename , bbox = self . bbox , overwrite = True )
Rerender all derived images from the original . If optmization settings or expected sizes changed they will be used for the new rendering .
32,755
def get_file ( fs , filename ) : storage = by_name ( fs ) if storage is None : abort ( 404 ) return storage . serve ( filename )
Serve files for storages with direct file access
32,756
def init_app ( app , * storages ) : app . config . setdefault ( 'FS_SERVE' , app . config . get ( 'DEBUG' , False ) ) app . config . setdefault ( 'FS_ROOT' , join ( app . instance_path , 'fs' ) ) app . config . setdefault ( 'FS_PREFIX' , None ) app . config . setdefault ( 'FS_URL' , None ) app . config . setdefault ( '...
Initialize Storages configuration Register blueprint if necessary .
32,757
def get_metadata ( self , filename ) : dest = self . path ( filename ) with open ( dest , 'rb' , buffering = 0 ) as f : checksum = 'sha1:{0}' . format ( sha1 ( f ) ) return { 'checksum' : checksum , 'size' : os . path . getsize ( dest ) , 'mime' : files . mime ( filename ) , 'modified' : datetime . fromtimestamp ( os ....
Fetch all available metadata
32,758
def on_touch_move ( self , touch ) : if touch is not self . _touch : return False self . pos = ( touch . x + self . x_down , touch . y + self . y_down ) return True
Follow the touch
32,759
def clear ( self ) : for k in list ( self . keys ( ) ) : if k not in self . extrakeys : del self [ k ]
Unset everything .
32,760
def loading ( self ) : if getattr ( self , '_initialized' , False ) : raise ValueError ( "Already loading" ) self . _initialized = False yield self . _initialized = True
Context manager for when you need to instantiate entities upon unpacking
32,761
def dice ( self , n , d ) : for i in range ( 0 , n ) : yield self . roll_die ( d )
Roll n dice with d faces and yield the results .
32,762
def dice_check ( self , n , d , target , comparator = '<=' ) : from operator import gt , lt , ge , le , eq , ne comps = { '>' : gt , '<' : lt , '>=' : ge , '<=' : le , '=' : eq , '==' : eq , '!=' : ne } try : comparator = comps . get ( comparator , comparator ) except TypeError : pass return comparator ( sum ( self . d...
Roll n dice with d sides sum them and return whether they are < = target .
32,763
def percent_chance ( self , pct ) : if pct <= 0 : return False if pct >= 100 : return True return pct / 100 < self . random ( )
Given a pct % chance of something happening right now decide at random whether it actually happens and return True or False as appropriate .
32,764
def _remember_avatarness ( self , character , graph , node , is_avatar = True , branch = None , turn = None , tick = None ) : branch = branch or self . branch turn = turn or self . turn tick = tick or self . tick self . _avatarness_cache . store ( character , graph , node , branch , turn , tick , is_avatar ) self . que...
Use this to record a change in avatarness .
32,765
def _init_caches ( self ) : from . xcollections import ( StringStore , FunctionStore , CharacterMapping , UniversalMapping ) from . cache import ( Cache , NodeContentsCache , InitializedCache , EntitylessCache , InitializedEntitylessCache , AvatarnessCache , AvatarRulesHandledCache , CharacterThingRulesHandledCache , C...
The last turn when the rules engine ran in each branch
32,766
def close ( self ) : import sys , os for store in self . stores : if hasattr ( store , 'save' ) : store . save ( reimport = False ) path , filename = os . path . split ( store . _filename ) modname = filename [ : - 3 ] if modname in sys . modules : del sys . modules [ modname ] super ( ) . close ( )
Commit changes and close the database .
32,767
def advance ( self ) : try : return next ( self . _rules_iter ) except InnerStopIteration : self . _rules_iter = self . _follow_rules ( ) return StopIteration ( ) except StopIteration : self . _rules_iter = self . _follow_rules ( ) return final_rule
Follow the next rule if available .
32,768
def del_character ( self , name ) : self . query . del_character ( name ) self . del_graph ( name ) del self . character [ name ]
Remove the Character from the database entirely .
32,769
def alias ( self , v , stat = 'dummy' ) : from . util import EntityStatAccessor r = DummyEntity ( self ) r [ stat ] = v return EntityStatAccessor ( r , stat , engine = self )
Return a representation of a value suitable for use in historical queries .
32,770
def on_play_speed ( self , * args ) : Clock . unschedule ( self . play ) Clock . schedule_interval ( self . play , 1.0 / self . play_speed )
Change the interval at which self . play is called to match my current play_speed .
32,771
def remake_display ( self , * args ) : Builder . load_string ( self . kv ) if hasattr ( self , '_kv_layout' ) : self . remove_widget ( self . _kv_layout ) del self . _kv_layout self . _kv_layout = KvLayout ( ) self . add_widget ( self . _kv_layout )
Remake any affected widgets after a change in my kv .
32,772
def next_turn ( self , * args ) : if self . tmp_block : return eng = self . app . engine dial = self . dialoglayout if eng . universal . get ( 'block' ) : Logger . info ( "MainScreen: next_turn blocked, delete universal['block'] to unblock" ) return if dial . idx < len ( dial . todo ) : Logger . info ( "MainScreen: not...
Advance time by one turn if it s not blocked .
32,773
def setgraphval ( delta , graph , key , val ) : delta . setdefault ( graph , { } ) [ key ] = val
Change a delta to say that a graph stat was set to a certain value
32,774
def setnode ( delta , graph , node , exists ) : delta . setdefault ( graph , { } ) . setdefault ( 'nodes' , { } ) [ node ] = bool ( exists )
Change a delta to say that a node was created or deleted
32,775
def setnodeval ( delta , graph , node , key , value ) : if ( graph in delta and 'nodes' in delta [ graph ] and node in delta [ graph ] [ 'nodes' ] and not delta [ graph ] [ 'nodes' ] [ node ] ) : return delta . setdefault ( graph , { } ) . setdefault ( 'node_val' , { } ) . setdefault ( node , { } ) [ key ] = value
Change a delta to say that a node stat was set to a certain value
32,776
def setedge ( delta , is_multigraph , graph , orig , dest , idx , exists ) : if is_multigraph ( graph ) : delta . setdefault ( graph , { } ) . setdefault ( 'edges' , { } ) . setdefault ( orig , { } ) . setdefault ( dest , { } ) [ idx ] = bool ( exists ) else : delta . setdefault ( graph , { } ) . setdefault ( 'edges' ,...
Change a delta to say that an edge was created or deleted
32,777
def setedgeval ( delta , is_multigraph , graph , orig , dest , idx , key , value ) : if is_multigraph ( graph ) : if ( graph in delta and 'edges' in delta [ graph ] and orig in delta [ graph ] [ 'edges' ] and dest in delta [ graph ] [ 'edges' ] [ orig ] and idx in delta [ graph ] [ 'edges' ] [ orig ] [ dest ] and not d...
Change a delta to say that an edge stat was set to a certain value
32,778
def advancing ( self ) : if self . _forward : raise ValueError ( "Already advancing" ) self . _forward = True yield self . _forward = False
A context manager for when time is moving forward one turn at a time .
32,779
def batch ( self ) : if self . _no_kc : raise ValueError ( "Already in a batch" ) self . _no_kc = True yield self . _no_kc = False
A context manager for when you re creating lots of state .
32,780
def get_delta ( self , branch , turn_from , tick_from , turn_to , tick_to ) : from functools import partial if turn_from == turn_to : return self . get_turn_delta ( branch , turn_from , tick_from , tick_to ) delta = { } graph_objs = self . _graph_objs if turn_to < turn_from : updater = partial ( update_backward_window ...
Get a dictionary describing changes to all graphs .
32,781
def _init_caches ( self ) : from collections import defaultdict from . cache import Cache , NodesCache , EdgesCache self . _where_cached = defaultdict ( list ) self . _global_cache = self . query . _global_cache = { } self . _node_objs = node_objs = WeakValueDictionary ( ) self . _get_node_stuff = ( node_objs , self . ...
Immediate children of a branch
32,782
def is_parent_of ( self , parent , child ) : if parent == 'trunk' : return True if child == 'trunk' : return False if child not in self . _branches : raise ValueError ( "The branch {} seems not to have ever been created" . format ( child ) ) if self . _branches [ child ] [ 0 ] == parent : return True return self . is_p...
Return whether child is a branch descended from parent at any remove .
32,783
def _copy_plans ( self , branch_from , turn_from , tick_from ) : plan_ticks = self . _plan_ticks plan_ticks_uncommitted = self . _plan_ticks_uncommitted time_plan = self . _time_plan plans = self . _plans branch = self . branch where_cached = self . _where_cached last_plan = self . _last_plan turn_end_plan = self . _tu...
Collect all plans that are active at the given time and copy them to the current branch
32,784
def delete_plan ( self , plan ) : branch , turn , tick = self . _btt ( ) to_delete = [ ] plan_ticks = self . _plan_ticks [ plan ] for trn , tcks in plan_ticks . items ( ) : if turn == trn : for tck in tcks : if tck >= tick : to_delete . append ( ( trn , tck ) ) elif trn > turn : to_delete . extend ( ( trn , tck ) for t...
Delete the portion of a plan that has yet to occur .
32,785
def _nbtt ( self ) : from . cache import HistoryError branch , turn , tick = self . _btt ( ) tick += 1 if ( branch , turn ) in self . _turn_end_plan : if tick > self . _turn_end_plan [ branch , turn ] : self . _turn_end_plan [ branch , turn ] = tick else : tick = self . _turn_end_plan [ branch , turn ] + 1 self . _turn...
Increment the tick and return branch turn tick
32,786
def commit ( self ) : self . query . globl [ 'branch' ] = self . _obranch self . query . globl [ 'turn' ] = self . _oturn self . query . globl [ 'tick' ] = self . _otick set_branch = self . query . set_branch for branch , ( parent , turn_start , tick_start , turn_end , tick_end ) in self . _branches . items ( ) : set_b...
Write the state of all graphs to the database and commit the transaction .
32,787
def new_graph ( self , name , data = None , ** attr ) : self . _init_graph ( name , 'Graph' ) g = Graph ( self , name , data , ** attr ) self . _graph_objs [ name ] = g return g
Return a new instance of type Graph initialized with the given data if provided .
32,788
def new_digraph ( self , name , data = None , ** attr ) : self . _init_graph ( name , 'DiGraph' ) dg = DiGraph ( self , name , data , ** attr ) self . _graph_objs [ name ] = dg return dg
Return a new instance of type DiGraph initialized with the given data if provided .
32,789
def new_multigraph ( self , name , data = None , ** attr ) : self . _init_graph ( name , 'MultiGraph' ) mg = MultiGraph ( self , name , data , ** attr ) self . _graph_objs [ name ] = mg return mg
Return a new instance of type MultiGraph initialized with the given data if provided .
32,790
def new_multidigraph ( self , name , data = None , ** attr ) : self . _init_graph ( name , 'MultiDiGraph' ) mdg = MultiDiGraph ( self , name , data , ** attr ) self . _graph_objs [ name ] = mdg return mdg
Return a new instance of type MultiDiGraph initialized with the given data if provided .
32,791
def get_graph ( self , name ) : if name in self . _graph_objs : return self . _graph_objs [ name ] graphtypes = { 'Graph' : Graph , 'DiGraph' : DiGraph , 'MultiGraph' : MultiGraph , 'MultiDiGraph' : MultiDiGraph } type_s = self . query . graph_type ( name ) if type_s not in graphtypes : raise GraphNameError ( "I don't ...
Return a graph previously created with new_graph new_digraph new_multigraph or new_multidigraph
32,792
def del_graph ( self , name ) : self . get_graph ( name ) self . query . del_graph ( name ) if name in self . _graph_objs : del self . _graph_objs [ name ]
Remove all traces of a graph s existence from the database
32,793
def read_tree_dendropy ( tree ) : out = Tree ( ) d2t = dict ( ) if not hasattr ( tree , 'preorder_node_iter' ) or not hasattr ( tree , 'seed_node' ) or not hasattr ( tree , 'is_rooted' ) : raise TypeError ( "tree must be a DendroPy Tree object" ) if tree . is_rooted != True : out . is_rooted = False for node in tree . ...
Create a TreeSwift tree from a DendroPy tree
32,794
def read_tree_newick ( newick ) : if not isinstance ( newick , str ) : try : newick = str ( newick ) except : raise TypeError ( "newick must be a str" ) if newick . lower ( ) . endswith ( '.gz' ) : f = gopen ( expanduser ( newick ) ) ts = f . read ( ) . decode ( ) . strip ( ) f . close ( ) elif isfile ( expanduser ( ne...
Read a tree from a Newick string or file
32,795
def read_tree_nexus ( nexus ) : if not isinstance ( nexus , str ) : raise TypeError ( "nexus must be a str" ) if nexus . lower ( ) . endswith ( '.gz' ) : f = gopen ( expanduser ( nexus ) ) elif isfile ( expanduser ( nexus ) ) : f = open ( expanduser ( nexus ) ) else : f = nexus . splitlines ( ) trees = dict ( ) for lin...
Read a tree from a Nexus string or file
32,796
def read_tree ( input , schema ) : schema_to_function = { 'dendropy' : read_tree_dendropy , 'newick' : read_tree_newick , 'nexml' : read_tree_nexml , 'nexus' : read_tree_nexus } if schema . lower ( ) not in schema_to_function : raise ValueError ( "Invalid schema: %s (valid options: %s)" % ( schema , ', ' . join ( sorte...
Read a tree from a string or file
32,797
def avg_branch_length ( self , terminal = True , internal = True ) : if not isinstance ( terminal , bool ) : raise TypeError ( "terminal must be a bool" ) if not isinstance ( internal , bool ) : raise TypeError ( "internal must be a bool" ) if not internal and not terminal : raise RuntimeError ( "Must select either int...
Compute the average length of the selected branches of this Tree . Edges with length None will be treated as 0 - length
32,798
def branch_lengths ( self , terminal = True , internal = True ) : if not isinstance ( terminal , bool ) : raise TypeError ( "terminal must be a bool" ) if not isinstance ( internal , bool ) : raise TypeError ( "internal must be a bool" ) for node in self . traverse_preorder ( ) : if ( internal and not node . is_leaf ( ...
Generator over the lengths of the selected branches of this Tree . Edges with length None will be output as 0 - length
32,799
def closest_leaf_to_root ( self ) : best = ( None , float ( 'inf' ) ) d = dict ( ) for node in self . traverse_preorder ( ) : if node . edge_length is None : d [ node ] = 0 else : d [ node ] = node . edge_length if not node . is_root ( ) : d [ node ] += d [ node . parent ] if node . is_leaf ( ) and d [ node ] < best [ ...
Return the leaf that is closest to the root and the corresponding distance . Edges with no length will be considered to have a length of 0