idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
6,600
def purge_project ( ) : print ( 'Current Directory: {}' . format ( os . getcwd ( ) ) ) directories = os . listdir ( os . getcwd ( ) ) if CACHE_NAME in directories : response = get_input ( "Would you like to delete the cache and" "everything in it? [y/n]: " , ( 'y' , 'n' ) ) if response == 'y' : print ( "Listing local '__cythercache__':" ) cache_dir = os . path . join ( os . getcwd ( ) , "__cythercache__" ) to_delete = [ ] contents = os . listdir ( cache_dir ) if contents : for filename in contents : print ( '\t' + filename ) filepath = os . path . join ( cache_dir , filename ) to_delete . append ( filepath ) else : print ( "\tNothing was found in the cache" ) check_response = get_input ( "Delete all these files? (^)" "[y/n]: " , ( 'y' , 'n' ) ) if check_response == 'y' : for filepath in to_delete : os . remove ( filepath ) os . rmdir ( cache_dir ) else : print ( "Skipping the deletion... all files are fine!" ) else : print ( "Skipping deletion of the cache" ) else : print ( "Couldn't find a cache file ('{}') in this " "directory" . format ( CACHE_NAME ) )
Purge a directory of anything cyther related
340
9
6,601
def map ( func , items , pool_size = 10 ) : with OrderedPool ( func , pool_size ) as pool : for count , item in enumerate ( items ) : pool . put ( item ) for i in xrange ( count + 1 ) : yield pool . get ( )
a parallelized work - alike to the built - in map function
62
13
6,602
def start ( self ) : for i in xrange ( self . size ) : scheduler . schedule ( self . _runner ) self . _closing = False
start the pool s workers
34
5
6,603
def put ( self , * args , * * kwargs ) : self . inq . put ( ( self . _putcount , ( args , kwargs ) ) ) self . _putcount += 1
place a new item into the pool to be handled by the workers
45
13
6,604
def get_entity ( self , entity_id ) : entity_node = self . map_entity_id_to_node . get ( entity_id ) if entity_node is not None : return Centity ( node = entity_node , type = self . type ) else : for entity_node in self . __get_entity_nodes ( ) : if self . type == 'NAF' : label_id = 'id' elif self . type == 'KAF' : label_id = 'eid' if entity_node . get ( label_id ) == entity_id : return Centity ( node = entity_node , type = self . type ) return None
Returns the entity object for the given entity identifier
147
9
6,605
def add_external_reference_to_entity ( self , entity_id , ext_ref ) : node_entity = self . map_entity_id_to_node . get ( entity_id ) if node_entity is not None : entity = Centity ( node_entity , self . type ) entity . add_external_reference ( ext_ref ) else : print >> sys . stderr , 'Trying to add a reference to the entity' , entity_id , 'but can not be found in this file'
Adds an external reference to a entity specified by the entity identifier
113
12
6,606
def to_kaf ( self ) : if self . type == 'NAF' : for node in self . __get_entity_nodes ( ) : node . set ( 'eid' , node . get ( 'id' ) ) del node . attrib [ 'id' ]
Converts the layer from KAF to NAF
62
10
6,607
def to_naf ( self ) : if self . type == 'KAF' : for node in self . __get_entity_nodes ( ) : node . set ( 'id' , node . get ( 'eid' ) ) del node . attrib [ 'eid' ]
Converts the layer from NAF to KAF
63
10
6,608
def updateImage ( self , val ) : # import ipdb # ipdb.set_trace() # Filtrovani # Zjisteni jakou sigmu pouzit if ( self . firstRun == True and self . inputSigma >= 0 ) : sigma = np . round ( self . inputSigma , 2 ) elif self . interactivity : sigma = np . round ( self . ssigma . val , 2 ) else : sigma = np . round ( self . inputSigma , 2 ) # Prahovani (smin, smax) # max_threshold = self.threshold_upper # min_threshold = self.threshold if self . interactivity : self . smin . val = ( np . round ( self . smin . val , 2 ) ) self . smin . valtext . set_text ( '{}' . format ( self . smin . val ) ) self . smax . val = ( np . round ( self . smax . val , 2 ) ) self . smax . valtext . set_text ( '{}' . format ( self . smax . val ) ) self . threshold = self . smin . val self . threshold_upper = self . smax . val closeNum = int ( np . round ( self . sclose . val , 0 ) ) openNum = int ( np . round ( self . sopen . val , 0 ) ) self . sclose . valtext . set_text ( '{}' . format ( closeNum ) ) self . sopen . valtext . set_text ( '{}' . format ( openNum ) ) else : closeNum = self . ICBinaryClosingIterations openNum = self . ICBinaryOpeningIterations # make_image_processing(sigma, min_threshold, max_threshold, closeNum, openNum, auto_method=self.) self . imgFiltering , self . threshold = make_image_processing ( data = self . data , voxelsize_mm = self . voxelsize_mm , seeds = self . seeds , sigma_mm = sigma , min_threshold = self . threshold , max_threshold = self . threshold_upper , closeNum = closeNum , openNum = openNum , min_threshold_auto_method = self . auto_method , fill_holes = self . fillHoles , get_priority_objects = self . get_priority_objects , nObj = self . nObj ) # Vykresleni dat if ( self . interactivity == True ) : self . drawVisualization ( ) # Nastaveni kontrolnich hodnot self . firstRun = False garbage . collect ( ) self . debugInfo ( )
Hlavni update metoda . Cinny kod pro gaussovske filtrovani prahovani binarni uzavreni a otevreni a vraceni nejvetsich nebo oznacenych objektu .
600
59
6,609
def GetAll ( alias = None , location = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) policies = [ ] policy_resp = clc . v2 . API . Call ( 'GET' , 'antiAffinityPolicies/%s' % alias , { } , session = session ) for k in policy_resp : r_val = policy_resp [ k ] for r in r_val : if r . get ( 'location' ) : if location and r [ 'location' ] . lower ( ) != location . lower ( ) : continue servers = [ obj [ 'id' ] for obj in r [ 'links' ] if obj [ 'rel' ] == "server" ] policies . append ( AntiAffinity ( id = r [ 'id' ] , name = r [ 'name' ] , location = r [ 'location' ] , servers = servers , session = session ) ) return ( policies )
Gets a list of anti - affinity policies within a given account .
214
14
6,610
def GetLocation ( location = None , alias = None , session = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = session ) return ( AntiAffinity . GetAll ( alias = alias , location = location , session = session ) )
Returns a list of anti - affinity policies within a specific location .
60
13
6,611
def Create ( name , alias = None , location = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) if not location : location = clc . v2 . Account . GetLocation ( session = session ) r = clc . v2 . API . Call ( 'POST' , 'antiAffinityPolicies/%s' % alias , json . dumps ( { 'name' : name , 'location' : location } ) , session = session ) return ( AntiAffinity ( id = r [ 'id' ] , name = r [ 'name' ] , location = r [ 'location' ] , servers = [ ] , session = session ) )
Creates a new anti - affinity policy within a given account .
156
13
6,612
def Update ( self , name ) : r = clc . v2 . API . Call ( 'PUT' , 'antiAffinityPolicies/%s/%s' % ( self . alias , self . id ) , { 'name' : name } , session = self . session ) self . name = name
Change the policy s name .
68
6
6,613
def _node ( handler , single = None , multi = None ) : return _AbstractSyntaxTreeNode ( handler = handler , single = ( single if single else [ ] ) , multi = ( multi if multi else [ ] ) )
Return an _AbstractSyntaxTreeNode with some elements defaulted .
49
14
6,614
def _recurse ( node , * args , * * kwargs ) : node_name = node . __class__ . __name__ try : info_for_node = _NODE_INFO_TABLE [ node_name ] except KeyError : return action = kwargs [ info_for_node . handler ] depth = kwargs [ "depth" ] # Invoke action if available if action is not None : action ( node_name , node , depth ) # Recurse recurse_kwargs = kwargs kwargs [ "depth" ] = depth + 1 for single in info_for_node . single : _recurse ( getattr ( node , single ) , * args , * * recurse_kwargs ) for multi in info_for_node . multi : for statement in getattr ( node , multi ) : _recurse ( statement , * args , * * recurse_kwargs )
Recursive print worker - recurses the AST and prints each node .
198
14
6,615
def recurse ( node , * args , * * kwargs ) : # Construct a default table of actions, using action from kwargs # if it is available. These are forwarded to _recurse. fwd = dict ( ) for node_info in _NODE_INFO_TABLE . values ( ) : fwd [ node_info . handler ] = kwargs . get ( node_info . handler , None ) fwd [ "depth" ] = 0 _recurse ( node , * args , * * fwd )
Entry point for AST recursion .
114
7
6,616
def get_filename_filled_with_checked_labels ( self , labels = None ) : if labels is None : labels = self . slab_wg . action_check_slab_ui ( ) string_labels = imma . get_nlabels ( slab = self . slab_wg . slab , labels = labels , return_mode = "str" ) filename = self . vtk_file . format ( "-" . join ( string_labels ) ) return filename
Fill used labels into filename
106
5
6,617
def patched ( module_name ) : if module_name in _patchers : return _patched_copy ( module_name , _patchers [ module_name ] ) # grab the unpatched version of the module for posterity old_module = sys . modules . pop ( module_name , None ) # apply all the standard library patches we have saved = [ ( module_name , old_module ) ] for name , patch in _patchers . iteritems ( ) : new_mod = _patched_copy ( name , patch ) saved . append ( ( name , sys . modules . pop ( name ) ) ) sys . modules [ name ] = new_mod try : # import the requested module with patches in place result = __import__ ( module_name , { } , { } , module_name . rsplit ( "." , 1 ) [ 0 ] ) finally : # put all the original modules back as they were for name , old_mod in saved : if old_mod is None : sys . modules . pop ( name , None ) else : sys . modules [ name ] = old_mod return result
import and return a named module with patches applied locally only
238
11
6,618
def patched_context ( * module_names , * * kwargs ) : local = kwargs . pop ( 'local' , False ) if kwargs : raise TypeError ( "patched_context() got an unexpected keyword " + "argument %r" % kwargs . keys ( ) [ 0 ] ) patch ( * module_names ) if local : @ scheduler . local_incoming_hook @ scheduler . local_outgoing_hook def hook ( direction , target ) : { 1 : patch , 2 : unpatch } [ direction ] ( * module_names ) yield unpatch ( * module_names ) if local : scheduler . remove_local_incoming_hook ( hook ) scheduler . remove_local_outgoing_hook ( hook )
apply emulation patches only for a specific context
166
8
6,619
def patch ( * module_names ) : if not module_names : module_names = _patchers . keys ( ) log . info ( "monkey-patching in-place (%d modules)" % len ( module_names ) ) for module_name in module_names : if module_name not in _patchers : raise ValueError ( "'%s' is not greenhouse-patchable" % module_name ) for module_name in module_names : if module_name in sys . modules : module = sys . modules [ module_name ] else : module = __import__ ( module_name , { } , { } , module_name . rsplit ( "." , 1 ) [ 0 ] ) for attr , patch in _patchers [ module_name ] . items ( ) : setattr ( module , attr , patch )
apply monkey - patches to stdlib modules in - place
180
11
6,620
def from_json ( data ) : parsed_data = json . loads ( data ) trigger = TriggerInfo ( parsed_data [ 'trigger' ] [ 'class' ] , parsed_data [ 'trigger' ] [ 'kind' ] , ) # extract content type, needed to decode body content_type = parsed_data [ 'content_type' ] return Event ( body = Event . decode_body ( parsed_data [ 'body' ] , content_type ) , content_type = content_type , trigger = trigger , fields = parsed_data . get ( 'fields' ) , headers = parsed_data . get ( 'headers' ) , _id = parsed_data [ 'id' ] , method = parsed_data [ 'method' ] , path = parsed_data [ 'path' ] , size = parsed_data [ 'size' ] , timestamp = datetime . datetime . utcfromtimestamp ( parsed_data [ 'timestamp' ] ) , url = parsed_data [ 'url' ] , _type = parsed_data [ 'type' ] , type_version = parsed_data [ 'type_version' ] , version = parsed_data [ 'version' ] )
Decode event encoded as JSON by processor
257
8
6,621
def decode_body ( body , content_type ) : if isinstance ( body , dict ) : return body else : try : decoded_body = base64 . b64decode ( body ) except : return body if content_type == 'application/json' : try : return json . loads ( decoded_body ) except : pass return decoded_body
Decode event body
77
4
6,622
def furtherArgsProcessing ( args ) : if isinstance ( args , str ) : unprocessed = args . strip ( ) . split ( ' ' ) if unprocessed [ 0 ] == 'cyther' : del unprocessed [ 0 ] args = parser . parse_args ( unprocessed ) . __dict__ elif isinstance ( args , argparse . Namespace ) : args = args . __dict__ elif isinstance ( args , dict ) : pass else : raise CytherError ( "Args must be a instance of str or argparse.Namespace, not '{}'" . format ( str ( type ( args ) ) ) ) if args [ 'watch' ] : args [ 'timestamp' ] = True args [ 'watch_stats' ] = { 'counter' : 0 , 'errors' : 0 , 'compiles' : 0 , 'polls' : 0 } args [ 'print_args' ] = True return args
Converts args and deals with incongruities that argparse couldn t handle
205
16
6,623
def processFiles ( args ) : to_process = [ ] for filename in args [ 'filenames' ] : file = dict ( ) if args [ 'include' ] : file [ 'include' ] = INCLUDE_STRING + '' . join ( [ '-I' + item for item in args [ 'include' ] ] ) else : file [ 'include' ] = INCLUDE_STRING file [ 'file_path' ] = getPath ( filename ) file [ 'file_base_name' ] = os . path . splitext ( os . path . basename ( file [ 'file_path' ] ) ) [ 0 ] file [ 'no_extension' ] , file [ 'extension' ] = os . path . splitext ( file [ 'file_path' ] ) if file [ 'extension' ] not in CYTHONIZABLE_FILE_EXTS : raise CytherError ( "The file '{}' is not a designated cython file" . format ( file [ 'file_path' ] ) ) base_path = os . path . dirname ( file [ 'file_path' ] ) local_build = args [ 'local' ] if not local_build : cache_name = os . path . join ( base_path , '__cythercache__' ) os . makedirs ( cache_name , exist_ok = True ) file [ 'c_name' ] = os . path . join ( cache_name , file [ 'file_base_name' ] ) + '.c' else : file [ 'c_name' ] = file [ 'no_extension' ] + '.c' file [ 'object_file_name' ] = os . path . splitext ( file [ 'c_name' ] ) [ 0 ] + '.o' output_name = args [ 'output_name' ] if args [ 'watch' ] : file [ 'output_name' ] = file [ 'no_extension' ] + DEFAULT_OUTPUT_EXTENSION elif output_name : if os . path . exists ( output_name ) and os . path . isfile ( output_name ) : file [ 'output_name' ] = output_name else : dirname = os . path . dirname ( output_name ) if not dirname : dirname = os . getcwd ( ) if os . path . exists ( dirname ) : file [ 'output_name' ] = output_name else : raise CytherError ( 'The directory specified to write' 'the output file in does not exist' ) else : file [ 'output_name' ] = file [ 'no_extension' ] + DEFAULT_OUTPUT_EXTENSION file [ 'stamp_if_error' ] = 0 to_process . append ( file ) return to_process
Generates and error checks each file s information before the compilation actually starts
626
14
6,624
def makeCommands ( file ) : commands = [ [ 'cython' , '-a' , '-p' , '-o' , file [ 'c_name' ] , file [ 'file_path' ] ] , [ 'gcc' , '-DNDEBUG' , '-g' , '-fwrapv' , '-O3' , '-Wall' , '-Wextra' , '-pthread' , '-fPIC' , '-c' , file [ 'include' ] , '-o' , file [ 'object_file_name' ] , file [ 'c_name' ] ] , [ 'gcc' , '-g' , '-Wall' , '-Wextra' , '-pthread' , '-shared' , RUNTIME_STRING , '-o' , file [ 'output_name' ] , file [ 'object_file_name' ] , L_OPTION ] ] return commands
Given a high level preset it will construct the basic args to pass over . ninja beast minimal swift
218
19
6,625
def collection ( et_model , variable , collections , start_date , end_date , t_interval , geometry , * * kwargs ) : # Load the ET model if et_model . lower ( ) == 'ndvi' : # # DEADBEEF - Manually adding OpenET Model to system path # # This will eventually be handled by import openet modules # import os # model_path = os.path.dirname(os.path.dirname(os.path.dirname( # os.path.abspath(os.path.realpath(__file__))))) # print(model_path) # sys.path.insert(0, os.path.join(model_path, 'openet-ndvi-test')) # print(sys.path) try : import openet . ndvi as model except ModuleNotFoundError : print ( '\nThe ET model {} could not be imported' . format ( et_model ) + '\nPlease ensure that the model has been installed' ) return False except Exception as e : print ( 'Unhandled Exception: {}' . format ( e ) ) raise elif et_model . lower ( ) == 'ssebop' : # # DEADBEEF - Manually adding OpenET Models to system path # # This will eventually be handled by import openet modules # import os # model_path = os.path.dirname(os.path.dirname(os.path.dirname( # os.path.abspath(os.path.realpath(__file__))))) # print(model_path) # sys.path.insert(0, os.path.join(model_path, 'openet-ssebop-test')) try : import openet . ssebop as model except ModuleNotFoundError : print ( '\nThe ET model {} could not be imported' . format ( et_model ) + '\nPlease ensure that the model has been installed' ) return False except Exception as e : print ( 'Unhandled Exception: {}' . format ( e ) ) raise else : # CGM - This could just be a value error exception raise ValueError ( 'unsupported et_model type' ) variable_coll = model . collection ( variable , collections , start_date , end_date , t_interval , geometry , * * kwargs ) return variable_coll
Generic OpenET Collection
521
4
6,626
def get_terminals_as_list ( self ) : terminalList = [ ] for t_node in self . __get_t_nodes ( ) : terminalList . append ( Cterminal ( t_node ) ) return terminalList
Iterator that returns all the terminal objects
52
7
6,627
def get_edges_as_list ( self ) : my_edges = [ ] for edge_node in self . __get_edge_nodes ( ) : my_edges . append ( Cedge ( edge_node ) ) return my_edges
Iterator that returns all the edge objects
57
7
6,628
def select_labels ( self , labels = None ) : self . _resize_if_required ( ) segmentation = self . _select_labels ( self . resized_segmentation , labels ) # logger.debug("select labels in show_segmentation {} sum {}".format(labels, np.sum(segmentation))) self . resized_binar_segmentation = segmentation
Prepare binar segmentation based on input segmentation and labels .
90
14
6,629
def _select_labels ( self , segmentation , labels = None ) : logger . debug ( "select_labels() started with labels={}" . format ( labels ) ) if self . slab is not None and labels is not None : segmentation_out = select_labels ( segmentation , labels , slab = self . slab ) else : logger . warning ( "Nothing found for labels " + str ( labels ) ) un = np . unique ( segmentation ) if len ( un ) < 2 : logger . error ( "Just one label found in input segmenation" ) segmentation_out = ( segmentation > un [ 0 ] ) . astype ( segmentation . dtype ) return segmentation_out
Get selection of labels from input segmentation
153
8
6,630
def Get ( self , key ) : for template in self . templates : if template . id == key : return ( template )
Get template by providing name ID or other unique key .
26
11
6,631
def Search ( self , key ) : results = [ ] for template in self . templates : if template . id . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( template ) elif template . name . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( template ) return ( results )
Search template list by providing partial name ID or other key .
78
12
6,632
def SecondsToZuluTS ( secs = None ) : if not secs : secs = int ( time . time ( ) ) return ( datetime . utcfromtimestamp ( secs ) . strftime ( "%Y-%m-%dT%H:%M:%SZ" ) )
Returns Zulu TS from unix time seconds .
68
10
6,633
def main ( ) : parser = argparse . ArgumentParser ( description = 'DistanceClassifier for classification based on distance measure in feature space.' , add_help = False ) parser . add_argument ( 'INPUT_FILE' , type = str , help = 'Data file to perform DistanceClassifier on; ensure that the class label column is labeled as "class".' ) parser . add_argument ( '-h' , '--help' , action = 'help' , help = 'Show this help message and exit.' ) parser . add_argument ( '-is' , action = 'store' , dest = 'INPUT_SEPARATOR' , default = '\t' , type = str , help = 'Character used to separate columns in the input file.' ) parser . add_argument ( '-d' , action = 'store' , dest = 'D' , default = 'mahalanobis' , choices = [ 'mahalanobis' , 'euclidean' ] , type = str , help = 'Distance metric to use.' ) parser . add_argument ( '-v' , action = 'store' , dest = 'VERBOSITY' , default = 1 , choices = [ 0 , 1 , 2 ] , type = int , help = 'How much information DistanceClassifier communicates while it is running: 0 = none, 1 = minimal, 2 = all.' ) parser . add_argument ( '-s' , action = 'store' , dest = 'RANDOM_STATE' , default = 0 , type = int , help = 'Random state for train/test split.' ) parser . add_argument ( '--version' , action = 'version' , version = 'DistanceClassifier {version}' . format ( version = __version__ ) , help = 'Show DistanceClassifier\'s version number and exit.' ) args = parser . parse_args ( ) if args . VERBOSITY >= 2 : print ( '\nDistanceClassifier settings:' ) for arg in sorted ( args . __dict__ ) : print ( '{}\t=\t{}' . format ( arg , args . __dict__ [ arg ] ) ) print ( '' ) input_data = pd . read_csv ( args . INPUT_FILE , sep = args . INPUT_SEPARATOR ) if 'Class' in input_data . columns . values : input_data . rename ( columns = { 'Label' : 'label' } , inplace = True ) RANDOM_STATE = args . RANDOM_STATE if args . RANDOM_STATE > 0 else None # # training_indices, testing_indices = train_test_split(input_data.index, # stratify=input_data['label'].values, # train_size=0.75, # test_size=0.25, # random_state=RANDOM_STATE) # # training_features = input_data.loc[training_indices].drop('label', axis=1).values # training_classes = input_data.loc[training_indices, 'label'].values # # testing_features = input_data.loc[testing_indices].drop('label', axis=1).values # testing_classes = input_data.loc[testing_indices, 'label'].values # Run and evaluate DistanceClassifier on the training and testing data dc = DistanceClassifier ( d = args . D ) # dc.fit(training_features, training_classes) dc . fit ( input_data . drop ( 'label' , axis = 1 ) . values , input_data [ 'label' ] . values ) print ( dc . score ( input_data . drop ( 'label' , axis = 1 ) . values , input_data [ 'label' ] . values ) )
Main function that is called when DistanceClassifier is run on the command line
826
15
6,634
def fit ( self , features , classes ) : # class labels classes = self . le . fit_transform ( classes ) # group the data by class label X = [ ] self . mu = [ ] self . Z = [ ] for i in np . unique ( classes ) : X . append ( features [ classes == i ] ) self . mu . append ( np . mean ( X [ i ] , axis = 0 ) ) if self . d == 'mahalanobis' : self . Z . append ( np . cov ( X [ i ] . transpose ( ) ) ) return self
Constructs the DistanceClassifier from the provided training data
125
11
6,635
def predict ( self , features ) : # get distance of features to class clusters distances = [ self . _distance ( x ) for x in features ] # assign class label belonging to smallest distance class_predict = [ np . argmin ( d ) for d in distances ] return self . le . inverse_transform ( class_predict )
Predict class outputs for an unlabelled feature set
71
11
6,636
def _distance ( self , x ) : distance = np . empty ( [ len ( self . mu ) ] ) for i in np . arange ( len ( self . mu ) ) : if self . d == 'mahalanobis' and self . is_invertible ( self . Z [ i ] ) : distance [ i ] = ( x - self . mu [ i ] ) . dot ( np . linalg . inv ( self . Z [ i ] ) ) . dot ( ( x - self . mu [ i ] ) . transpose ( ) ) else : distance [ i ] = ( x - self . mu [ i ] ) . dot ( ( x - self . mu [ i ] ) . transpose ( ) ) return distance
returns distance measures for features
160
6
6,637
def score ( self , features , classes , scoring_function = accuracy_score , * * scoring_function_kwargs ) : if not self . mu : raise ValueError ( 'The DistanceClassifier model must be fit before score() can be called' ) return scoring_function ( classes , self . predict ( features ) , * * scoring_function_kwargs )
Estimates the accuracy of the predictions from the constructed feature
77
11
6,638
def is_invertible ( self , X ) : if len ( X . shape ) == 2 : return X . shape [ 0 ] == X . shape [ 1 ] and np . linalg . matrix_rank ( X ) == X . shape [ 0 ] else : return False
checks if Z is invertible
60
7
6,639
def get_span_ids ( self ) : node_span = self . node . find ( 'span' ) if node_span is not None : mySpan = Cspan ( node_span ) span_ids = mySpan . get_span_ids ( ) return span_ids else : return [ ]
Returns the span object of the term
67
7
6,640
def set_span_from_ids ( self , span_list ) : this_span = Cspan ( ) this_span . create_from_ids ( span_list ) self . node . append ( this_span . get_node ( ) )
Sets the span for the term from list of ids
54
12
6,641
def get_term ( self , term_id ) : if term_id in self . idx : return Cterm ( self . idx [ term_id ] , self . type ) else : return None
Returns the term object for the supplied identifier
44
8
6,642
def add_term ( self , term_obj ) : if term_obj . get_id ( ) in self . idx : raise ValueError ( "Term with id {} already exists!" . format ( term_obj . get_id ( ) ) ) self . node . append ( term_obj . get_node ( ) ) self . idx [ term_obj . get_id ( ) ] = term_obj
Adds a term object to the layer
89
7
6,643
def add_external_reference ( self , term_id , external_ref ) : if term_id in self . idx : term_obj = Cterm ( self . idx [ term_id ] , self . type ) term_obj . add_external_reference ( external_ref ) else : print ( '{term_id} not in self.idx' . format ( * * locals ( ) ) )
Adds an external reference for the given term
90
8
6,644
def init_slab ( self , slab = None , segmentation = None , voxelsize_mm = None , show_ok_button = False ) : self . segmentation = segmentation self . voxelsize_mm = voxelsize_mm from . import show_segmentation self . slab = show_segmentation . create_slab_from_segmentation ( self . segmentation , slab = slab ) if show_ok_button : ok_button = QPushButton ( "Ok" ) ok_button . clicked . connect ( self . _action_ok_button ) self . superMainScrollLayout . addWidget ( ok_button )
Create widget with segmentation labels information used to select labels .
145
12
6,645
def GetServers ( location , group = None , alias = None , name_groups = False ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) payload = { 'AccountAlias' : alias } if group : payload [ 'HardwareGroupUUID' ] = clc . v1 . Group . GetGroupUUID ( group , alias , location ) else : payload [ 'Location' ] = location try : r = clc . v1 . API . Call ( 'post' , 'Server/GetAllServers' , payload ) if name_groups : r [ 'Servers' ] = clc . v1 . Group . NameGroups ( r [ 'Servers' ] , 'HardwareGroupUUID' ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'Servers' ] ) except Exception as e : if str ( e ) == "Hardware does not exist for location" : return ( [ ] ) else : raise
Gets a deep list of all Servers for a given Hardware Group and its sub groups or all Servers for a given location .
214
27
6,646
def GetAllServers ( alias = None , name_groups = False ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) servers = [ ] clc . v1 . Account . GetLocations ( ) for location in clc . LOCATIONS : try : r = clc . v1 . API . Call ( 'post' , 'Server/GetAllServers' , { 'AccountAlias' : alias , 'Location' : location } , hide_errors = [ 5 , ] ) if name_groups : r [ 'Servers' ] = clc . v1 . Group . NameGroups ( r [ 'Servers' ] , 'HardwareGroupUUID' ) if int ( r [ 'StatusCode' ] ) == 0 : servers += r [ 'Servers' ] except : pass return ( servers )
Gets a deep list of all Servers in all groups and datacenters .
183
17
6,647
def GetTemplateID ( alias , location , name ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) r = Server . GetTemplates ( alias , location ) for row in r : if row [ 'Name' ] . lower ( ) == name . lower ( ) : return ( row [ 'OperatingSystem' ] ) else : if clc . args : clc . v1 . output . Status ( "ERROR" , 3 , "Template %s not found in account %s datacenter %s" % ( name , alias , location ) ) raise Exception ( "Template not found" )
Given a template name return the unique OperatingSystem ID .
154
11
6,648
def ConvertToTemplate ( server , template , password = None , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if password is None : password = clc . v1 . Server . GetCredentials ( [ server , ] , alias ) [ 0 ] [ 'Password' ] r = clc . v1 . API . Call ( 'post' , 'Server/ConvertServerToTemplate' , { 'AccountAlias' : alias , 'Name' : server , 'Password' : password , 'TemplateAlias' : template } ) return ( r )
Converts an existing server into a template .
130
9
6,649
def RestoreServer ( server , group , alias , location ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) if re . match ( "([a-zA-Z0-9]){32}" , group . replace ( "-" , "" ) ) : groups_uuid = group else : groups_uuid = clc . v1 . Group . GetGroupUUID ( group , alias , location ) r = clc . v1 . API . Call ( 'post' , 'Server/RestoreServer' , { 'AccountAlias' : alias , 'Name' : server , 'HardwareGroupUUID' : groups_uuid } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
Restores an archived server .
184
6
6,650
def _ServerActions ( action , alias , servers ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) results = [ ] for server in servers : r = clc . v1 . API . Call ( 'post' , 'Server/%sServer' % ( action ) , { 'AccountAlias' : alias , 'Name' : server } ) if int ( r [ 'StatusCode' ] ) == 0 : results . append ( r ) return ( results )
Archives the specified servers .
108
6
6,651
def GetDisks ( server , alias = None , guest_names = True ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) r = clc . v1 . API . Call ( 'post' , 'Server/ListDisks' , { 'AccountAlias' : alias , 'Name' : server , 'QueryGuestDiskNames' : guest_names } ) return ( r [ 'Disks' ] )
Returns list of disks configured for the server
97
8
6,652
def DeleteDisk ( server , scsi_bus_id , scsi_device_id , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) r = clc . v1 . API . Call ( 'post' , 'Server/DeleteDisk' , { 'AccountAlias' : alias , 'Name' : server , 'OverrideFailsafes' : True , 'ScsiBusID' : scsi_bus_id , 'ScsiDeviceID' : scsi_device_id } ) return ( r )
Deletes the specified disk .
124
6
6,653
def get_external_references ( self ) : for ext_ref_node in self . node . findall ( 'externalRef' ) : ext_refs_obj = CexternalReference ( ext_ref_node ) for ref in ext_refs_obj : yield ref
Iterator that returns all the external reference objects of the external references object
60
13
6,654
def set_id ( self , this_id ) : if self . type == 'NAF' : return self . node . set ( 'id' , this_id ) elif self . type == 'KAF' : return self . node . set ( 'wid' , this_id )
Set the identifier for the token
63
6
6,655
def to_naf ( self ) : if self . type == 'KAF' : self . type = 'NAF' for node in self . __get_wf_nodes ( ) : node . set ( 'id' , node . get ( 'wid' ) ) del node . attrib [ 'wid' ]
Converts the object to NAF
70
7
6,656
def get_wf ( self , token_id ) : wf_node = self . idx . get ( token_id ) if wf_node is not None : return Cwf ( node = wf_node , type = self . type ) else : for wf_node in self . __get_wf_nodes ( ) : if self . type == 'NAF' : label_id = 'id' elif self . type == 'KAF' : label_id = 'wid' if wf_node . get ( label_id ) == token_id : return Cwf ( node = wf_node , type = self . type ) return None
Returns the token object for the given token identifier
149
9
6,657
def add_wf ( self , wf_obj ) : if wf_obj . get_id ( ) in self . idx : raise ValueError ( "Text node (wf) with id {} already exists!" . format ( wf_obj . get_id ( ) ) ) self . node . append ( wf_obj . get_node ( ) ) self . idx [ wf_obj . get_id ( ) ] = wf_obj
Adds a token object to the text layer
101
8
6,658
def remove_tokens_of_sentence ( self , sentence_id ) : nodes_to_remove = set ( ) for wf in self : if wf . get_sent ( ) == sentence_id : nodes_to_remove . add ( wf . get_node ( ) ) for node in nodes_to_remove : self . node . remove ( node )
Removes the tokens of the given sentence
82
8
6,659
def aggregate_daily ( image_coll , start_date = None , end_date = None , agg_type = 'mean' ) : if start_date and end_date : test_coll = image_coll . filterDate ( ee . Date ( start_date ) , ee . Date ( end_date ) ) elif start_date : test_coll = image_coll . filter ( ee . Filter . greaterThanOrEquals ( 'system:time_start' , ee . Date ( start_date ) . millis ( ) ) ) elif end_date : test_coll = image_coll . filter ( ee . Filter . lessThan ( 'system:time_start' , ee . Date ( end_date ) . millis ( ) ) ) else : test_coll = image_coll # Build a list of dates in the image_coll def get_date ( time ) : return ee . Date ( ee . Number ( time ) ) . format ( 'yyyy-MM-dd' ) date_list = ee . List ( test_coll . aggregate_array ( 'system:time_start' ) ) . map ( get_date ) . distinct ( ) . sort ( ) def aggregate_func ( date_str ) : start_date = ee . Date ( ee . String ( date_str ) ) end_date = start_date . advance ( 1 , 'day' ) agg_coll = image_coll . filterDate ( start_date , end_date ) # if agg_type.lower() == 'mean': agg_img = agg_coll . mean ( ) # elif agg_type.lower() == 'median': # agg_img = agg_coll.median() return agg_img . set ( { 'system:index' : start_date . format ( 'yyyyMMdd' ) , 'system:time_start' : start_date . millis ( ) , 'date' : start_date . format ( 'yyyy-MM-dd' ) , } ) return ee . ImageCollection ( date_list . map ( aggregate_func ) )
Aggregate images by day without using joins
464
8
6,660
def remove_this_clink ( self , clink_id ) : for clink in self . get_clinks ( ) : if clink . get_id ( ) == clink_id : self . node . remove ( clink . get_node ( ) ) break
Removes the clink for the given clink identifier
60
11
6,661
def GetGroupEstimate ( group , alias = None , location = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) group_uuid = clc . v1 . Group . GetGroupUUID ( group , alias , location ) r = clc . v1 . API . Call ( 'post' , 'Billing/GetGroupEstimate' , { 'AccountAlias' : alias , 'HardwareGroupUUID' : group_uuid } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
Gets estimated costs for a group of servers .
144
10
6,662
def GetGroupSummaries ( alias = None , date_start = None , date_end = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) payload = { 'AccountAlias' : alias } if date_start is not None : payload [ 'StartDate' ] = date_start if date_end is not None : payload [ 'EndDate' ] = date_end r = clc . v1 . API . Call ( 'post' , 'Billing/GetGroupSummaries' , payload ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'GroupTotals' ] )
Gets the charges for groups and servers within a given account and for any date range .
146
18
6,663
def GetServerEstimate ( server , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) r = clc . v1 . API . Call ( 'post' , 'Billing/GetServerEstimate' , { 'AccountAlias' : alias , 'ServerName' : server } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
Gets the estimated monthly cost for a given server .
92
11
6,664
def display_direct ( ) : include_dirs , runtime_dirs , runtime = get_direct_config ( ) print ( "Include Search Dirs: {}" . format ( include_dirs ) ) print ( "\tContents: {}\n" . format ( get_dir_contents ( include_dirs ) ) ) print ( "Runtime Search Dirs: {}" . format ( runtime_dirs ) ) print ( "\tContents: {}\n" . format ( get_dir_contents ( runtime_dirs ) ) ) print ( "Runtime Libs: '{}'" . format ( runtime ) )
Displays the output of get_direct_config formatted nicely
136
12
6,665
def save ( self , filename = 'saved.ol.p' ) : import dill as pickle sv = { # 'feature_function': self.feature_function, 'cl' : self . cl } pickle . dump ( sv , open ( filename , "wb" ) )
Save model to pickle file
62
6
6,666
def GetGroupUUID ( group , alias = None , location = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) r = Group . GetGroups ( location , alias ) for row in r : if row [ 'Name' ] == group : return ( row [ 'UUID' ] ) else : if clc . args : clc . v1 . output . Status ( "ERROR" , 3 , "Group %s not found in account %s datacenter %s" % ( group , alias , location ) ) raise Exception ( "Group not found" )
Given a group name return the unique group ID .
150
10
6,667
def NameGroups ( data_arr , id_key ) : new_data_arr = [ ] for data in data_arr : try : data_arr [ id_key ] = clc . _GROUP_MAPPING [ data [ id_key ] ] except : pass new_data_arr . append ( data ) if clc . args : clc . v1 . output . Status ( "ERROR" , 2 , "Group name conversion not yet implemented" ) return ( new_data_arr )
Get group name associated with ID .
109
7
6,668
def GetGroups ( location = None , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) r = clc . v1 . API . Call ( 'post' , 'Group/GetGroups' , { 'AccountAlias' : alias , 'Location' : location } ) for group in r [ 'HardwareGroups' ] : clc . _GROUP_MAPPING [ group [ 'UUID' ] ] = group [ 'Name' ] if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'HardwareGroups' ] )
Return all of alias groups in the given location .
153
10
6,669
def _GroupActions ( action , group , alias , location ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) groups_uuid = Group . GetGroupUUID ( group , alias , location ) r = clc . v1 . API . Call ( 'post' , 'Group/%sHardwareGroup' % ( action ) , { 'UUID' : groups_uuid , 'AccountAlias' : alias } ) return ( r )
Applies group level actions .
123
6
6,670
def get_endpoint_and_path ( environ ) : path = environ [ 'PATH_INFO' ] components = path . split ( '/' ) if '..' in components : raise HttpError ( '400 Bad Request' , 'Path cannot contain "..".' ) # Strip closing slash if components and components [ - 1 ] == '' : components . pop ( ) # If path contained '//', get the segment after the last occurence try : first = _rindex ( components , '' ) + 1 except ValueError : first = 0 components = components [ first : ] if len ( components ) == 0 : return '' , '' else : return components [ 0 ] , '/' . join ( components [ 1 : ] )
Extracts endpoint and path from the request URL .
156
11
6,671
def pack ( self ) : block = bytearray ( self . size ) self . pack_into ( block ) return block
convenience function for packing
27
6
6,672
def encoded_class ( block , offset = 0 ) : if not block : raise InvalidFileFormatNull for key in __magicmap__ : if block . find ( key , offset , offset + len ( key ) ) > - 1 : return __magicmap__ [ key ] raise InvalidFileFormat
predicate indicating whether a block of memory includes a magic number
61
12
6,673
def _copy_stream ( src , dest , length = 0 ) : if length == 0 : shutil . copyfileobj ( src , dest ) return bytes_left = length while bytes_left > 0 : buf_size = min ( _BUFFER_SIZE , bytes_left ) buf = src . read ( buf_size ) dest . write ( buf ) bytes_left -= buf_size
Similar to shutil . copyfileobj but supports limiting data size .
83
14
6,674
def _path_exists ( path ) : return os . path . exists ( path ) or os . path . islink ( path )
Checks if the path exists - is a file a directory or a symbolic link that may be broken .
29
21
6,675
def _exclusive_lock ( path ) : _create_file_dirs ( path ) fd = os . open ( path , os . O_WRONLY | os . O_CREAT , 0o600 ) try : retries_left = _LOCK_RETRIES success = False while retries_left > 0 : # try to acquire the lock in a loop # because gevent doesn't treat flock as IO, # so waiting here without yielding would get the worker killed try : fcntl . flock ( fd , fcntl . LOCK_EX | fcntl . LOCK_NB ) success = True break except IOError as e : if e . errno in [ errno . EAGAIN , errno . EWOULDBLOCK ] : # This yields execution to other green threads. gevent . sleep ( _LOCK_SLEEP_TIME_S ) retries_left -= 1 else : raise if success : yield else : raise ConcurrentModificationError ( path ) finally : if success : fcntl . flock ( fd , fcntl . LOCK_UN ) os . close ( fd )
A simple wrapper for fcntl exclusive lock .
248
11
6,676
def delete ( self , name , version , _lock = True ) : link_path = self . _link_path ( name ) if _lock : file_lock = _exclusive_lock ( self . _lock_path ( 'links' , name ) ) else : file_lock = _no_lock ( ) with file_lock : logger . debug ( 'Acquired or inherited lock for link %s.' , name ) if not _path_exists ( link_path ) : raise FiletrackerFileNotFoundError if _file_version ( link_path ) > version : logger . info ( 'Tried to delete newer version of %s (%d < %d), ignoring.' , name , version , _file_version ( link_path ) ) return False digest = self . _digest_for_link ( name ) with _exclusive_lock ( self . _lock_path ( 'blobs' , digest ) ) : logger . debug ( 'Acquired lock for blob %s.' , digest ) should_delete_blob = False with self . _db_transaction ( ) as txn : logger . debug ( 'Started DB transaction (deleting link).' ) digest_bytes = digest . encode ( ) link_count = self . db . get ( digest_bytes , txn = txn ) if link_count is None : raise RuntimeError ( "File exists but has no key in db" ) link_count = int ( link_count ) if link_count == 1 : logger . debug ( 'Deleting last link to blob %s.' , digest ) self . db . delete ( digest_bytes , txn = txn ) self . db . delete ( '{}:logical_size' . format ( digest ) . encode ( ) , txn = txn ) should_delete_blob = True else : new_count = str ( link_count - 1 ) . encode ( ) self . db . put ( digest_bytes , new_count , txn = txn ) logger . debug ( 'Committing DB transaction (deleting link).' ) logger . debug ( 'Committed DB transaction (deleting link).' ) os . unlink ( link_path ) logger . debug ( 'Deleted link %s.' , name ) if should_delete_blob : os . unlink ( self . _blob_path ( digest ) ) logger . debug ( 'Released lock for blob %s.' , digest ) logger . debug ( 'Released (or gave back) lock for link %s.' , name ) return True
Removes a file from the storage .
552
8
6,677
def stored_version ( self , name ) : link_path = self . _link_path ( name ) if not _path_exists ( link_path ) : return None return _file_version ( link_path )
Returns the version of file name or None if it doesn t exist .
48
14
6,678
def save ( self , new_path = None ) : self . saved_in_temp = new_path is None if new_path is None : fd , new_path = tempfile . mkstemp ( ) os . close ( fd ) if self . current_path : shutil . move ( self . current_path , new_path ) else : with open ( new_path , 'wb' ) as dest : _copy_stream ( self . _data , dest , self . _size ) self . current_path = new_path
Moves or creates the file with stream contents to a new location .
118
14
6,679
def get_lps ( self ) : if self . header is not None : for linguisticProcessor in self . header : for lp in linguisticProcessor : yield lp
Iterator that returns all the lp objects from linguistic processors layers from the header
37
15
6,680
def get_trees_as_list ( self ) : mytrees = [ ] if self . constituency_layer is not None : for tree in self . constituency_layer . get_trees ( ) : mytrees . append ( tree ) return mytrees
Iterator that returns the constituency trees
57
6
6,681
def convert_factualitylayer_to_factualities ( self ) : if self . factuality_layer is not None : this_node = self . factuality_layer . get_node ( ) if this_node . tag == 'factualitylayer' : new_node = Cfactualities ( ) #create dictionary from token ids to the term ids token2term = { } for t in self . get_terms ( ) : s = t . get_span ( ) for w in s . get_span_ids ( ) : token2term [ w ] = t . get_id ( ) fnr = 0 for fv in self . get_factvalues ( ) : fnr += 1 conf = fv . get_confidence ( ) wid = fv . get_id ( ) tid = token2term . get ( wid ) fnode = Cfactuality ( ) #set span with tid as element fspan = Cspan ( ) fspan . add_target_id ( tid ) fnode . set_span ( fspan ) #add factVal element with val, resource = factbank, + confidence if present fVal = Cfactval ( ) fVal . set_resource ( 'factbank' ) fVal . set_value ( fv . get_prediction ( ) ) if conf : fVal . set_confidence ( conf ) fnode . set_id ( 'f' + str ( fnr ) ) fnode . add_factval ( fVal ) new_node . add_factuality ( fnode ) self . root . remove ( this_node ) self . root . append ( new_node . get_node ( ) ) self . factuality_layer = new_node
Takes information from factuality layer in old representation Creates new factuality representation and removes the old layer
367
21
6,682
def get_constituency_extractor ( self ) : if self . constituency_layer is not None : ##Otherwise there are no constituens if self . my_constituency_extractor is None : self . my_constituency_extractor = Cconstituency_extractor ( self ) return self . my_constituency_extractor else : return None
Returns a constituency extractor object
80
6
6,683
def get_dependency_extractor ( self ) : if self . dependency_layer is not None : #otherwise there are no dependencies if self . my_dependency_extractor is None : self . my_dependency_extractor = Cdependency_extractor ( self ) return self . my_dependency_extractor else : return None
Returns a dependency extractor object
75
6
6,684
def add_wf ( self , wf_obj ) : if self . text_layer is None : self . text_layer = Ctext ( type = self . type ) self . root . append ( self . text_layer . get_node ( ) ) self . text_layer . add_wf ( wf_obj )
Adds a token to the text layer
72
7
6,685
def add_term ( self , term_obj ) : if self . term_layer is None : self . term_layer = Cterms ( type = self . type ) self . root . append ( self . term_layer . get_node ( ) ) self . term_layer . add_term ( term_obj )
Adds a term to the term layer
68
7
6,686
def add_chunk ( self , chunk_obj ) : if self . chunk_layer is None : self . chunk_layer = Cchunks ( type = self . type ) self . root . append ( self . chunk_layer . get_node ( ) ) self . chunk_layer . add_chunk ( chunk_obj )
Adds a chunk to the chunk layer
71
7
6,687
def create_term ( self , lemma , pos , morphofeat , tokens , id = None ) : if id is None : n = 1 if self . term_layer is None else len ( self . term_layer . idx ) + 1 id = "t{n}" . format ( * * locals ( ) ) new_term = Cterm ( type = self . type ) new_term . set_id ( id ) new_term . set_lemma ( lemma ) new_term . set_pos ( pos ) new_term . set_morphofeat ( morphofeat ) new_span = Cspan ( ) for token in tokens : new_span . add_target_id ( token . get_id ( ) ) new_term . set_span ( new_span ) self . add_term ( new_term ) return new_term
Create a new term and add it to the term layer
185
11
6,688
def add_markable ( self , markable_obj ) : if self . markable_layer is None : self . markable_layer = Cmarkables ( type = self . type ) self . root . append ( self . markable_layer . get_node ( ) ) self . markable_layer . add_markable ( markable_obj )
Adds a markable to the markable layer
77
9
6,689
def add_opinion ( self , opinion_obj ) : if self . opinion_layer is None : self . opinion_layer = Copinions ( ) self . root . append ( self . opinion_layer . get_node ( ) ) self . opinion_layer . add_opinion ( opinion_obj )
Adds an opinion to the opinion layer
66
7
6,690
def add_statement ( self , statement_obj ) : if self . attribution_layer is None : self . attribution_layer = Cattribution ( ) self . root . append ( self . attribution_layer . get_node ( ) ) self . attribution_layer . add_statement ( statement_obj )
Adds a statement to the attribution layer
64
7
6,691
def add_predicate ( self , predicate_obj ) : if self . srl_layer is None : self . srl_layer = Csrl ( ) self . root . append ( self . srl_layer . get_node ( ) ) self . srl_layer . add_predicate ( predicate_obj )
Adds a predicate to the semantic layer
70
7
6,692
def add_timex ( self , time_obj ) : if self . timex_layer is None : self . timex_layer = CtimeExpressions ( ) self . root . append ( self . timex_layer . get_node ( ) ) self . timex_layer . add_timex ( time_obj )
Adds a timex entry to the time layer
71
9
6,693
def set_header ( self , header ) : self . header = header self . root . insert ( 0 , header . get_node ( ) )
Sets the header of the object
31
7
6,694
def add_linguistic_processor ( self , layer , my_lp ) : if self . header is None : self . header = CHeader ( type = self . type ) self . root . insert ( 0 , self . header . get_node ( ) ) self . header . add_linguistic_processor ( layer , my_lp )
Adds a linguistic processor to the header
75
7
6,695
def create_linguistic_processor ( self , layer , name , version , * * kwargs ) : lp = Clp ( name = name , version = version , * * kwargs ) self . add_linguistic_processor ( layer , lp ) return lp
Create a new linguistic processor element and add it to the header
62
12
6,696
def add_dependency ( self , my_dep ) : if self . dependency_layer is None : self . dependency_layer = Cdependencies ( ) self . root . append ( self . dependency_layer . get_node ( ) ) self . dependency_layer . add_dependency ( my_dep )
Adds a dependency to the dependency layer
66
7
6,697
def create_dependency ( self , _from , to , function , comment = None ) : new_dependency = Cdependency ( ) new_dependency . set_from ( _from ) new_dependency . set_to ( to ) new_dependency . set_function ( function ) if comment : new_dependency . set_comment ( comment ) self . add_dependency ( new_dependency ) return new_dependency
Create a new dependency object and add it to the dependency layer
95
12
6,698
def add_tlink ( self , my_tlink ) : if self . temporalRelations_layer is None : self . temporalRelations_layer = CtemporalRelations ( ) self . root . append ( self . temporalRelations_layer . get_node ( ) ) self . temporalRelations_layer . add_tlink ( my_tlink )
Adds a tlink to the temporalRelations layer
73
9
6,699
def add_predicateAnchor ( self , my_predAnch ) : if self . temporalRelations_layer is None : self . temporalRelations_layer = CtemporalRelations ( ) self . root . append ( self . temporalRelations_layer . get_node ( ) ) self . temporalRelations_layer . add_predicateAnchor ( my_predAnch )
Adds a predAnch to the temporalRelations layer
81
10