idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
4,600
def download_file(url):
    """Download the text content at *url*.

    :param url: address to fetch via HTTP GET.
    :returns: the response body as text, or ``None`` when the server
        does not answer with HTTP 200.
    """
    response = requests.get(url)
    # Fixed: 'is not 200' compared object identity, not value — unreliable
    # for integers and a SyntaxWarning on CPython >= 3.8. Use '!='.
    if response.status_code != 200:
        return None
    return response.text
Downloads a file from the specified URL .
32
9
4,601
def get_sub_dsp(self, nodes_bunch, edges_bunch=None):
    """Return the sub-dispatcher induced by the given node and edge bunches.

    :param nodes_bunch: iterable of node ids to keep in the sub-dispatcher.
    :param edges_bunch: optional iterable of edges to remove from the
        induced sub-graph.
    :returns: a new dispatcher restricted to the requested nodes/edges,
        with default values copied for the surviving nodes.
    """
    # Resolve node ids to their real paths.
    nodes_bunch = [self.get_node(u)[1][0] for u in nodes_bunch]
    # Define an empty dispatcher over the induced sub-graph.
    sub_dsp = self.copy_structure(
        dmap=self.dmap.subgraph(nodes_bunch).copy()
    )
    # Namespace shortcuts for speed.
    nodes, dmap_out_degree = sub_dsp.nodes, sub_dsp.dmap.out_degree
    dmap_dv, dmap_rm_edge = self.default_values, sub_dsp.dmap.remove_edge
    dmap_rm_node = sub_dsp.dmap.remove_node
    # Remove function nodes whose inputs are not all available.
    for u in nodes_bunch:
        n = nodes[u].get('inputs', None)  # Function inputs.
        # Not all inputs are inside the bunch.
        if n is not None and not set(n).issubset(nodes_bunch):
            dmap_rm_node(u)  # Remove function node.
    # Remove edges that are not in edges_bunch.
    if edges_bunch is not None:
        for e in edges_bunch:  # Iterate sub-graph edges.
            dmap_rm_edge(*e)  # Remove edge.
    # Remove function nodes left with no outputs.
    for u in [u for u, n in sub_dsp.dmap.nodes.items()
              if n['type'] == 'function']:
        # noinspection PyCallingNonCallable
        if not dmap_out_degree(u):  # No outputs.
            dmap_rm_node(u)  # Remove function node.
    from networkx import isolates
    # Remove isolate nodes from sub-graph.
    sub_dsp.dmap.remove_nodes_from(list(isolates(sub_dsp.dmap)))
    # Copy default values for the nodes that survived.
    sub_dsp.default_values = {k: dmap_dv[k] for k in dmap_dv if k in nodes}
    return sub_dsp
Returns the sub - dispatcher induced by given node and edge bunches .
479
14
4,602
def data_nodes(self):
    """Return a dictionary with all data nodes of the dispatcher."""
    result = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'data':
            result[node_id] = attrs
    return result
Returns all data nodes of the dispatcher .
38
8
4,603
def function_nodes(self):
    """Return a dictionary with all function nodes of the dispatcher."""
    result = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'function':
            result[node_id] = attrs
    return result
Returns all function nodes of the dispatcher .
38
8
4,604
def sub_dsp_nodes(self):
    """Return a dictionary with all sub-dispatcher nodes of the dispatcher."""
    result = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'dispatcher':
            result[node_id] = attrs
    return result
Returns all sub - dispatcher nodes of the dispatcher .
43
10
4,605
def blue(self, memo=None):
    """Construct a BlueDispatcher out of the current object.

    :param memo: optional dict used to memoize already-converted
        dispatchers (keyed by dispatcher instance) so shared
        sub-dispatchers are converted once.
    :returns: a BlueDispatcher whose ``deferred`` list replays this
        dispatcher's nodes as (method-name, kwargs) operations.
    """
    memo = {} if memo is None else memo
    if self in memo:
        return memo[self]  # Already converted: reuse the cached blueprint.
    from .utils.dsp import map_list
    from .utils.blue import BlueDispatcher, _parent_blue
    # Register in memo BEFORE converting children, so cycles terminate.
    memo[self] = blue = BlueDispatcher(
        executor=self.executor, name=self.name, raises=self.raises,
        description=self.__doc__
    )
    dfl = self.default_values
    key_map_data = ['data_id', {'value': 'default_value'}]
    pred, succ = self.dmap.pred, self.dmap.succ

    def _set_weight(n, r, d):
        # Collect edge weights (if any) under key *n* of dict *r*.
        d = {i: j['weight'] for i, j in d.items() if 'weight' in j}
        if d:
            r[n] = d

    # Replay nodes in their original insertion order (by 'index').
    for k, v in sorted(self.nodes.items(), key=lambda x: x[1]['index']):
        v = v.copy()
        t = v.pop('type')
        del v['index']
        if t == 'data':
            method = 'add_data'
            combine_dicts(map_list(key_map_data, k, dfl.get(k, {})), base=v)
        elif t in ('function', 'dispatcher'):
            method = 'add_%s' % t
            if t == 'dispatcher':
                t = 'dsp'
            v['%s_id' % t] = k
            del v['wait_inputs']
            _set_weight('inp_weight', v, pred[k])
            _set_weight('out_weight', v, succ[k])
            if 'function' in v:
                # Recursively convert nested dispatchers/functions.
                v[t] = _parent_blue(v.pop('function'), memo)
        blue.deferred.append((method, v))
    return blue
Constructs a BlueDispatcher out of the current object .
442
13
4,606
def extend(self, *blues, memo=None):
    """Extend this Dispatcher by replaying each deferred operation of the
    given Blueprints, then registering the result on *self*.
    """
    from .utils.blue import BlueDispatcher
    blueprint = BlueDispatcher().extend(*blues, memo=memo)
    return blueprint.register(self, memo=memo)
Extends Dispatcher calling each deferred operation of given Blueprints .
49
14
4,607
def dispatch(self, inputs=None, outputs=None, cutoff=None, inputs_dist=None,
             wildcard=False, no_call=False, shrink=False, rm_unused_nds=False,
             select_output_kw=None, _wait_in=None, stopper=None,
             executor=False, sol_name=()):
    """Evaluate the minimum workflow and data outputs of the dispatcher
    model from the given inputs.

    :param inputs: input data values/ids.
    :param outputs: ids of the requested output data.
    :param cutoff: maximum distance to search for solutions.
    :param inputs_dist: initial distances of the input nodes.
    :param wildcard: if True, inputs are also treated as intermediate
        outputs.
    :param no_call: if True, evaluate the workflow without invoking
        functions.
    :param shrink: if True, pre-shrink the dispatcher before solving.
    :param rm_unused_nds: if True, remove unused function/sub-dispatcher
        nodes from the solution.
    :param select_output_kw: if given, forwarded to ``selector`` to
        filter the returned solution.
    :returns: the solution object (or the selected outputs).
    """
    dsp = self
    if not no_call:
        if shrink:  # Pre shrink.
            dsp = self.shrink_dsp(inputs, outputs, cutoff, inputs_dist,
                                  wildcard)
        elif outputs:
            # Restrict the model to what is reachable backwards from outputs.
            dsp = self.get_sub_dsp_from_workflow(
                outputs, self.dmap, reverse=True, blockers=inputs,
                wildcard=wildcard
            )
    # Initialize.
    self.solution = sol = self.solution.__class__(
        dsp, inputs, outputs, wildcard, cutoff, inputs_dist, no_call,
        rm_unused_nds, _wait_in, full_name=sol_name
    )
    # Dispatch.
    sol._run(stopper=stopper, executor=executor)
    if select_output_kw:
        return selector(dictionary=sol, **select_output_kw)
    # Return the evaluated data outputs.
    return sol
Evaluates the minimum workflow and data outputs of the dispatcher model from given inputs .
273
17
4,608
def shrink_dsp(self, inputs=None, outputs=None, cutoff=None,
               inputs_dist=None, wildcard=True):
    """Return a reduced dispatcher containing only the nodes reachable
    between *inputs* and *outputs*.

    :param inputs: input data ids to shrink from.
    :param outputs: requested output data ids.
    :param cutoff: maximum distance to search for solutions.
    :param inputs_dist: initial distances of the input nodes.
    :param wildcard: if True, inputs are also treated as intermediate
        outputs.
    :returns: a new, reduced dispatcher (empty when neither inputs nor
        outputs are given).
    """
    bfs = None
    if inputs:
        # Get all data nodes with no wait-inputs flag.
        wait_in = self._get_wait_in(flag=False)
        # Evaluate the workflow graph without invoking functions.
        o = self.dispatch(inputs, outputs, cutoff, inputs_dist, wildcard,
                          True, False, True, _wait_in=wait_in)
        data_nodes = self.data_nodes  # Get data nodes.
        from .utils.alg import _union_workflow, _convert_bfs
        bfs = _union_workflow(o)  # bfs edges.
        # Set minimum initial distances.
        if inputs_dist:
            inputs_dist = combine_dicts(o.dist, inputs_dist)
        else:
            inputs_dist = o.dist
        # Set data nodes to wait inputs.
        wait_in = self._get_wait_in(flag=True)
        while True:  # Start shrinking loop.
            # Evaluate the workflow graph without invoking functions.
            o = self.dispatch(inputs, outputs, cutoff, inputs_dist, wildcard,
                              True, False, False, _wait_in=wait_in)
            _union_workflow(o, bfs=bfs)  # Update bfs.
            n_d, status = o._remove_wait_in()  # Remove wait input flags.
            if not status:
                break  # Stop iteration: nothing more to unlock.
            # Update inputs with newly reachable data nodes.
            inputs = n_d.intersection(data_nodes).union(inputs)
        # Update outputs and convert bfs into DiGraphs.
        outputs, bfs = outputs or o, _convert_bfs(bfs)
    elif not outputs:
        return self.copy_structure()  # Empty Dispatcher.
    # Get sub-dispatcher from the breadth-first-search graph.
    dsp = self._get_dsp_from_bfs(outputs, bfs_graphs=bfs)
    return dsp
Returns a reduced dispatcher .
439
5
4,609
def _get_dsp_from_bfs ( self , outputs , bfs_graphs = None ) : bfs = bfs_graphs [ NONE ] if bfs_graphs is not None else self . dmap # Get sub dispatcher breadth-first-search graph. dsp = self . get_sub_dsp_from_workflow ( sources = outputs , graph = bfs , reverse = True , _update_links = False ) # Namespace shortcuts. succ , nodes , pred = dsp . dmap . succ , dsp . nodes , dsp . dmap . pred rm_edges , nds = dsp . dmap . remove_edges_from , dsp . data_nodes from . utils . alg import _nodes , _get_sub_out , _update_io for n in dsp . sub_dsp_nodes : a = nodes [ n ] = nodes [ n ] . copy ( ) bfs = bfs_graphs [ n ] if bfs_graphs is not None else None out = _get_sub_out ( a , succ [ n ] ) if 'input_domain' in a : out . update ( _nodes ( a [ 'inputs' ] . values ( ) ) ) a [ 'function' ] = a [ 'function' ] . _get_dsp_from_bfs ( out , bfs ) i , o = _update_io ( a , pred [ n ] , succ [ n ] ) # Unreachable nodes. rm_edges ( { ( u , n ) for u in i } . union ( ( ( n , u ) for u in o ) ) ) return dsp
Returns the sub - dispatcher induced by the workflow from outputs .
370
12
4,610
def add_callback(self, method):
    """Attach a method to be called when the future finishes.

    If the future has already finished, the callback is dispatched
    immediately as a TELL message to the calling actor.

    :param method: callable (or method name) to invoke on completion.
    :raises FutureError: when not called from inside an actor.
    """
    from_actor = get_current()
    if from_actor is None:
        # Guard clause: callbacks need an actor context to be delivered to.
        raise FutureError("add_callback only works when called "
                          "from inside an actor")
    callback = (method, from_actor.channel, from_actor.url)
    with self.__condition:
        # Fixed: compared state with 'is not', which tests identity and is
        # fragile for module-level constants; use value inequality instead.
        if self.__state != FINISHED:
            self.__callbacks.append(callback)
            return
    # Future already finished: invoke the callback directly
    # (send outside the lock, matching the original flow).
    # msg = TellRequest(TELL, method, [self], from_actor.url)
    msg = {TYPE: TELL, METHOD: method, PARAMS: ([self], {}),
           TO: from_actor.url}
    from_actor.channel.send(msg)
Attaches a method that will be called when the future finishes .
164
15
4,611
def send_work(self):
    """Send the query to the actor so it starts executing the work.

    :raises FutureError: if the future was already set running.
    """
    if not self.__set_running():
        raise FutureError("Future already running.")
    # msg = FutureRequest(FUTURE, self.__method, self.__params,
    #                     self.__channel, self.__target, self.__id)
    request = {
        TYPE: FUTURE,
        METHOD: self.__method,
        PARAMS: self.__params,
        CHANNEL: self.__channel,
        TO: self.__target,
        RPC_ID: self.__id,
    }
    self.__actor_channel.send(request)
Sends the query to the actor for it to start executing the work .
127
15
4,612
def set_result(self, result):
    """Set the return value of the work associated with the future.

    Only called internally. Waiters blocked on the condition are woken,
    then registered callbacks are invoked.
    """
    with self.__condition:
        self.__result = result
        self.__state = FINISHED
        self.__condition.notify_all()
    # Callbacks run after the state change is published.
    self._invoke_callbacks()
Sets the return value of work associated with the future . Only called internally .
49
16
4,613
def set_exception(self, exception):
    """Record *exception* as the outcome of the future.

    Only called internally. Waiters blocked on the condition are woken,
    then registered callbacks are invoked.
    """
    with self.__condition:
        self.__exception = exception
        self.__state = FINISHED
        self.__condition.notify_all()
    # Callbacks run after the state change is published.
    self._invoke_callbacks()
Sets the result of the future as being the given exception . Only called internally .
51
17
4,614
def angle_between_vectors(x, y):
    """Compute the angle between vectors *x* and *y*, in degrees.

    Relies on the module-level helpers ``dot_product`` and ``magnitude``.

    :returns: angle in degrees; 0 when either vector has zero magnitude
        (the angle is undefined in that case).
    """
    xm = magnitude(x)
    ym = magnitude(y)
    if xm == 0 or ym == 0:
        # Degenerate zero vector: keep the historical fallback of 0
        # instead of raising ZeroDivisionError.
        return 0
    # Fixed: the old guard returned 0 whenever the dot product was 0,
    # wrongly reporting perpendicular vectors as 0 deg instead of 90 deg.
    # Clamp to [-1, 1] to keep floating-point drift inside acos's domain.
    cos_angle = max(-1.0, min(1.0, dot_product(x, y) / (xm * ym)))
    return math.degrees(math.acos(cos_angle))
Compute the angle between vector x and y
72
9
4,615
def _ssh_forward_accept(ssh_session, timeout_ms):
    """Wait for an incoming connection on a reverse-forwarded port.

    The underlying C call blocks until a connection arrives or the
    timeout expires.

    :param ssh_session: raw session pointer handed to the C layer.
    :param timeout_ms: how long to wait, in milliseconds.
    :raises SshTimeoutException: if no connection arrived in time.
    :returns: the accepted channel handle.
    """
    channel = c_ssh_forward_accept(c_void_p(ssh_session), c_int(timeout_ms))
    if channel is None:
        raise SshTimeoutException()
    return channel
Waiting for an incoming connection from a reverse forwarded port . Note that this results in a kernel block until a connection is received .
67
26
4,616
def execute(self, cmd, block_size=DEFAULT_EXECUTE_READ_BLOCK_SIZE):
    """Execute a remote command, yielding its output block by block.

    A dedicated channel is created per call because this functionality
    does not support more than one command on the same channel.

    :param cmd: command line to run on the remote host.
    :param block_size: number of bytes to request per read.
    :yields: successive chunks of output; the final chunk is shorter
        than ``block_size`` (possibly empty).
    """
    with SshChannel(self) as sc:
        self.__log.debug("Executing command: %s" % (cmd))
        sc.open_session()
        sc.request_exec(cmd)
        # Fixed: 'while 1' -> 'while True'; local 'bytes' shadowed the
        # builtin; removed the unused 'buffer_' bytearray.
        while True:
            chunk = sc.read(block_size)
            yield chunk
            if len(chunk) < block_size:
                break
Execute a remote command . This functionality does not support more than one command to be executed on the same channel so we create a dedicated channel at the session level rather than allowing direct access at the channel level .
106
41
4,617
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile,
          force_relative=True):
    """Project File read-from-file method.

    Parses each card line of the GSSHA project file at *path* into
    ProjectCard objects attached to this ProjectFile, then assigns the
    srid, name, and file extension properties.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Headers to ignore
        HEADERS = ('GSSHAPROJECT',)
        # WMS Cards to include (don't discount as comments)
        WMS_CARDS = ('#INDEXGRID_GUID', '#PROJECTION_FILE', '#LandSoil',
                     '#CHANNEL_POINT_INPUT_WMS')
        GSSHAPY_CARDS = ('#GSSHAPY_EVENT_YML',)
        with open(path, 'r') as f:
            for line in f:
                if not line.strip():
                    # Skip empty lines
                    continue
                elif '#' in line.split()[0] and line.split()[0] not in WMS_CARDS + GSSHAPY_CARDS:
                    # Skip comments designated by the hash symbol
                    # (with the exception of WMS_CARDS and GSSHAPY_CARDS)
                    continue
                # NOTE(review): bare 'except' silently swallows every
                # failure from _extractCard — consider narrowing it.
                try:
                    card = self._extractCard(line, force_relative)
                except:
                    card = self._extractDirectoryCard(line, force_relative)
                # Now that the cardName and cardValue are separated,
                # load them into the gsshapy objects
                if card['name'] not in HEADERS:
                    # Create GSSHAPY Project Card object
                    prjCard = ProjectCard(name=card['name'],
                                          value=card['value'])
                    # Associate ProjectCard with ProjectFile
                    prjCard.projectFile = self
                    # Extract MAP_TYPE card value for convenience working
                    # with output maps
                    if card['name'] == 'MAP_TYPE':
                        self.mapType = int(card['value'])
        # Assign properties
        self.srid = spatialReferenceID
        self.name = name
        self.fileExtension = extension
Project File Read from File Method
442
6
4,618
def _write ( self , session , openFile , replaceParamFile ) : # Enforce cards that must be written in certain order PRIORITY_CARDS = ( 'WMS' , 'MASK_WATERSHED' , 'REPLACE_LINE' , 'REPLACE_PARAMS' , 'REPLACE_VALS' , 'REPLACE_FOLDER' ) filename = os . path . split ( openFile . name ) [ 1 ] name = filename . split ( '.' ) [ 0 ] # Write lines openFile . write ( 'GSSHAPROJECT\n' ) # Write priority lines for card_key in PRIORITY_CARDS : card = self . getCard ( card_key ) # Write the card if card is not None : openFile . write ( card . write ( originalPrefix = self . name , newPrefix = name ) ) # Initiate write on each ProjectCard that belongs to this ProjectFile for card in self . projectCards : if card . name not in PRIORITY_CARDS : openFile . write ( card . write ( originalPrefix = self . name , newPrefix = name ) )
Project File Write to File Method
255
6
4,619
def appendDirectory(self, directory, projectFilePath):
    """Prepend *directory* to the relative paths in the project file.

    By default project file paths are read/written as relative paths;
    this rewrites the file in place with *directory* joined onto each
    path value.
    """
    lines = []
    with open(projectFilePath, 'r') as original:
        for l in original:
            lines.append(l)
    with open(projectFilePath, 'w') as new:
        for line in lines:
            card = {}
            # NOTE(review): bare 'except' hides all _extractCard errors —
            # consider narrowing it.
            try:
                card = self._extractCard(line)
            except:
                card = self._extractDirectoryCard(line)
            # Determine number of spaces between card and value for nice alignment
            numSpaces = max(2, 25 - len(card['name']))
            if card['value'] is None:
                # Card with no value: name only.
                rewriteLine = '%s\n' % (card['name'])
            else:
                if card['name'] == 'WMS':
                    # WMS keeps its value untouched (single space).
                    rewriteLine = '%s %s\n' % (card['name'], card['value'])
                elif card['name'] == 'PROJECT_PATH':
                    filePath = '"%s"' % os.path.normpath(directory)
                    rewriteLine = '%s%s%s\n' % (card['name'],
                                                ' ' * numSpaces, filePath)
                elif '"' in card['value']:
                    # Quoted value is a filename: join the directory on.
                    filename = card['value'].strip('"')
                    filePath = '"%s"' % os.path.join(directory, filename)
                    rewriteLine = '%s%s%s\n' % (card['name'],
                                                ' ' * numSpaces, filePath)
                else:
                    # Non-path value: rewrite as-is (aligned).
                    rewriteLine = '%s%s%s\n' % (card['name'],
                                                ' ' * numSpaces,
                                                card['value'])
            new.write(rewriteLine)
Prepend a directory to relative paths in the project file . By default the project file paths are read and written as relative paths . Use this method to prepend a directory to all the paths in the project file .
383
41
4,620
def readProject(self, directory, projectFileName, session, spatial=False,
                spatialReferenceID=None):
    """Read all files for a GSSHA project into the database.

    :param directory: directory containing the project files.
    :param projectFileName: name of the project file to read.
    :param session: SQLAlchemy session used for persistence.
    :param spatial: if True, read files with spatial objects.
    :param spatialReferenceID: srid; derived automatically when None.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Add project file to session
        session.add(self)
        # First read self
        self.read(directory, projectFileName, session, spatial=spatial,
                  spatialReferenceID=spatialReferenceID)
        # Get the batch directory for output
        batchDirectory = self._getBatchDirectory(directory)
        # Automatically derive the spatial reference system, if possible
        if spatialReferenceID is None:
            spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
        # Read in replace param file
        replaceParamFile = self._readReplacementFiles(directory, session,
                                                      spatial,
                                                      spatialReferenceID)
        # Read Input Files
        self._readXput(self.INPUT_FILES, directory, session,
                       spatial=spatial,
                       spatialReferenceID=spatialReferenceID,
                       replaceParamFile=replaceParamFile)
        # Read Output Files (from the batch directory)
        self._readXput(self.OUTPUT_FILES, batchDirectory, session,
                       spatial=spatial,
                       spatialReferenceID=spatialReferenceID,
                       replaceParamFile=replaceParamFile)
        # Read Input Map Files
        self._readXputMaps(self.INPUT_MAPS, directory, session,
                           spatial=spatial,
                           spatialReferenceID=spatialReferenceID,
                           replaceParamFile=replaceParamFile)
        # Read WMS Dataset Files
        self._readWMSDatasets(self.WMS_DATASETS, batchDirectory, session,
                              spatial=spatial,
                              spatialReferenceID=spatialReferenceID)
        # Commit to database
        self._commit(session, self.COMMIT_ERROR_MESSAGE)
Read all files for a GSSHA project into the database .
366
13
4,621
def readInput(self, directory, projectFileName, session, spatial=False,
              spatialReferenceID=None):
    """Read only the input files for a GSSHA project into the database.

    :param directory: directory containing the project files.
    :param projectFileName: name of the project file to read.
    :param session: SQLAlchemy session used for persistence.
    :param spatial: if True, read files with spatial objects.
    :param spatialReferenceID: srid; derived automatically when None.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Add project file to session
        session.add(self)
        # Read Project File
        self.read(directory, projectFileName, session, spatial,
                  spatialReferenceID)
        # Automatically derive the spatial reference system, if possible
        if spatialReferenceID is None:
            spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
        # Read in replace param file
        replaceParamFile = self._readReplacementFiles(directory, session,
                                                      spatial,
                                                      spatialReferenceID)
        # Read Input Files
        self._readXput(self.INPUT_FILES, directory, session,
                       spatial=spatial,
                       spatialReferenceID=spatialReferenceID,
                       replaceParamFile=replaceParamFile)
        # Read Input Map Files
        self._readXputMaps(self.INPUT_MAPS, directory, session,
                           spatial=spatial,
                           spatialReferenceID=spatialReferenceID,
                           replaceParamFile=replaceParamFile)
        # Commit to database
        self._commit(session, self.COMMIT_ERROR_MESSAGE)
Read only input files for a GSSHA project into the database .
251
14
4,622
def readOutput(self, directory, projectFileName, session, spatial=False,
               spatialReferenceID=None):
    """Read only the output files for a GSSHA project into the database.

    :param directory: directory containing the project files.
    :param projectFileName: name of the project file to read.
    :param session: SQLAlchemy session used for persistence.
    :param spatial: if True, read files with spatial objects.
    :param spatialReferenceID: srid; derived automatically when None.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Add project file to session
        session.add(self)
        # Read Project File
        self.read(directory, projectFileName, session, spatial,
                  spatialReferenceID)
        # Get the batch directory for output
        batchDirectory = self._getBatchDirectory(directory)
        # Read Mask (dependency of some output files)
        maskMap = WatershedMaskFile()
        maskMapFilename = self.getCard('WATERSHED_MASK').value.strip('"')
        maskMap.read(session=session, directory=directory,
                     filename=maskMapFilename, spatial=spatial)
        maskMap.projectFile = self
        # Automatically derive the spatial reference system, if possible
        if spatialReferenceID is None:
            spatialReferenceID = self._automaticallyDeriveSpatialReferenceId(directory)
        # Read Output Files
        self._readXput(self.OUTPUT_FILES, batchDirectory, session,
                       spatial=spatial,
                       spatialReferenceID=spatialReferenceID)
        # Read WMS Dataset Files
        self._readWMSDatasets(self.WMS_DATASETS, batchDirectory, session,
                              spatial=spatial,
                              spatialReferenceID=spatialReferenceID)
        # Commit to database
        self._commit(session, self.COMMIT_ERROR_MESSAGE)
Read only output files for a GSSHA project to the database .
314
14
4,623
def _readXputFile ( self , file_cards , card_name , directory , session , spatial = False , spatialReferenceID = None , replaceParamFile = None , * * kwargs ) : # Automatically derive the spatial reference system, if possible if spatialReferenceID is None : spatialReferenceID = self . _automaticallyDeriveSpatialReferenceId ( directory ) card = self . getCard ( card_name ) if card : fileIO = file_cards [ card . name ] filename = card . value . strip ( '"' ) . strip ( "'" ) # Invoke read method on each file return self . _invokeRead ( fileIO = fileIO , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile , * * kwargs )
Read specific IO file for a GSSHA project to the database .
182
14
4,624
def writeProject(self, session, directory, name):
    """Write all files for the project from the database to disk.

    :param session: SQLAlchemy session to query objects from.
    :param directory: target directory for the written files.
    :param name: base name used as the file-name prefix.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Get the batch directory for output
        batchDirectory = self._getBatchDirectory(directory)
        # Get param file for writing
        replaceParamFile = self.replaceParamFile
        # Write the replacement files
        self._writeReplacementFiles(session=session, directory=directory,
                                    name=name)
        # Write Project File
        self.write(session=session, directory=directory, name=name)
        # Write input files
        self._writeXput(session=session, directory=directory,
                        fileCards=self.INPUT_FILES, name=name,
                        replaceParamFile=replaceParamFile)
        # Write output files (into the batch directory)
        self._writeXput(session=session, directory=batchDirectory,
                        fileCards=self.OUTPUT_FILES, name=name)
        # Write input map files
        self._writeXputMaps(session=session, directory=directory,
                            mapCards=self.INPUT_MAPS, name=name,
                            replaceParamFile=replaceParamFile)
        # Write WMS Dataset Files
        self._writeWMSDatasets(session=session, directory=batchDirectory,
                               wmsDatasetCards=self.WMS_DATASETS, name=name)
Write all files for a project from the database to file .
282
12
4,625
def writeInput(self, session, directory, name):
    """Write only the input files for a GSSHA project from the database
    to disk.

    :param session: SQLAlchemy session to query objects from.
    :param directory: target directory for the written files.
    :param name: base name used as the file-name prefix.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Get param file for writing
        replaceParamFile = self.replaceParamFile
        # Write Project File
        self.write(session=session, directory=directory, name=name)
        # Write input files
        self._writeXput(session=session, directory=directory,
                        fileCards=self.INPUT_FILES, name=name,
                        replaceParamFile=replaceParamFile)
        # Write input map files
        self._writeXputMaps(session=session, directory=directory,
                            mapCards=self.INPUT_MAPS, name=name,
                            replaceParamFile=replaceParamFile)
Write only input files for a GSSHA project from the database to file .
152
16
4,626
def writeOutput(self, session, directory, name):
    """Write only the output files for a GSSHA project from the database
    to disk.

    :param session: SQLAlchemy session to query objects from.
    :param directory: target directory for the written files.
    :param name: base name used as the file-name prefix.
    """
    self.project_directory = directory
    with tmp_chdir(directory):
        # Get the batch directory for output
        batchDirectory = self._getBatchDirectory(directory)
        # Write the replacement files
        self._writeReplacementFiles(session=session, directory=directory,
                                    name=name)
        # Write Project File
        self.write(session=session, directory=directory, name=name)
        # Write output files (into the batch directory)
        self._writeXput(session=session, directory=batchDirectory,
                        fileCards=self.OUTPUT_FILES, name=name)
        # Write WMS Dataset Files
        self._writeWMSDatasets(session=session, directory=batchDirectory,
                               wmsDatasetCards=self.WMS_DATASETS, name=name)
Write only output files for a GSSHA project from the database to file .
179
16
4,627
def getFileKeys(self):
    """Retrieve a list of file keys that have been read into the database.

    :returns: list of keys from :meth:`getFileObjects` whose file object
        is populated (truthy).
    """
    files = self.getFileObjects()
    # Fixed: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; use items() with a comprehension.
    return [key for key, value in files.items() if value]
Retrieve a list of file keys that have been read into the database .
51
15
4,628
def getFileObjects(self):
    """Retrieve a dictionary mapping human-readable file keys to their
    GSSHAPY file objects (including this project file itself).
    """
    file_objects = {
        'project-file': self,
        'mapping-table-file': self.mapTableFile,
        'channel-input-file': self.channelInputFile,
        'precipitation-file': self.precipFile,
        'storm-pipe-network-file': self.stormPipeNetworkFile,
        'hmet-file': self.hmetFile,
        'nwsrfs-file': self.nwsrfsFile,
        'orographic-gage-file': self.orographicGageFile,
        'grid-pipe-file': self.gridPipeFile,
        'grid-stream-file': self.gridStreamFile,
        'time-series-file': self.timeSeriesFiles,
        'projection-file': self.projectionFile,
        'replace-parameters-file': self.replaceParamFile,
        'replace-value-file': self.replaceValFile,
        'output-location-file': self.outputLocationFiles,
        'maps': self.maps,
        'link-node-datasets-file': self.linkNodeDatasets,
    }
    return file_objects
Retrieve a dictionary of file objects .
257
8
4,629
def getCard(self, name):
    """Return the ProjectCard whose name matches *name*
    (case-insensitive), or None when no such card exists.
    """
    wanted = name.upper()
    for candidate in self.projectCards:
        if candidate.name.upper() == wanted:
            return candidate
    return None
Retrieve card object for given card name .
40
9
4,630
def deleteCard(self, card_name, db_session):
    """Remove the named card from the GSSHA project file and commit.

    No-op when the card does not exist.
    """
    target = self.getCard(card_name.upper())
    if target is None:
        return
    db_session.delete(target)
    db_session.commit()
Removes card from gssha project file
70
9
4,631
def getGridByCard(self, gssha_card_name):
    """Return a GDALGrid for the grid file referenced by the given card.

    :param gssha_card_name: card naming a grid file (must be one of the
        INPUT_MAPS or WMS_DATASETS cards).
    :raises ValueError: if the card is not a valid grid card, is missing
        from the project, or the #PROJECTION_FILE card is absent.
    """
    with tmp_chdir(self.project_directory):
        if gssha_card_name not in (self.INPUT_MAPS + self.WMS_DATASETS):
            raise ValueError("Card {0} not found in valid grid cards ..."
                             .format(gssha_card_name))
        grid_card = self.getCard(gssha_card_name)
        if grid_card is None:
            raise ValueError("{0} card not found ..."
                             .format(gssha_card_name))
        pro_card = self.getCard("#PROJECTION_FILE")
        if pro_card is None:
            raise ValueError("#PROJECTION_FILE card not found ...")
        # Build the grid from the grid file plus its projection file.
        grid_path = grid_card.value.strip('"').strip("'")
        prj_path = pro_card.value.strip('"').strip("'")
        return GDALGrid(grid_path, prj_path)
Returns GDALGrid object of GSSHA grid
245
10
4,632
def getGrid(self, use_mask=True):
    """Return the GDALGrid for the GSSHA model bounds.

    :param use_mask: when True use the watershed mask grid; otherwise
        use the elevation grid.
    """
    card_name = "WATERSHED_MASK" if use_mask else "ELEVATION"
    return self.getGridByCard(card_name)
Returns GDALGrid object of GSSHA model bounds
61
11
4,633
def getIndexGrid(self, name):
    """Return a GDALGrid for the named index map.

    :param name: name of the index map to look up in the map table file.
    :raises ValueError: if the #PROJECTION_FILE card is missing.
    """
    index_map = self.mapTableFile.indexMaps.filter_by(name=name).one()
    pro_card = self.getCard("#PROJECTION_FILE")
    if pro_card is None:
        raise ValueError("#PROJECTION_FILE card not found ...")
    with tmp_chdir(self.project_directory):
        # Build the grid from the index-map file plus the projection file.
        return GDALGrid(index_map.filename,
                        pro_card.value.strip('"').strip("'"))
Returns GDALGrid object of index map
134
8
4,634
def getWkt(self):
    """Return the GSSHA model's projection as a WKT string.

    :raises ValueError: if the #PROJECTION_FILE card is missing.
    """
    pro_card = self.getCard("#PROJECTION_FILE")
    if pro_card is None:
        raise ValueError("#PROJECTION_FILE card not found ...")
    with tmp_chdir(self.project_directory):
        prj_path = pro_card.value.strip('"').strip("'")
        with open(prj_path) as pro_file:
            return pro_file.read()
Returns GSSHA projection WKT string
132
8
4,635
def getOutlet(self):
    """Return the (longitude, latitude) of the outlet cell.

    The OUTROW/OUTCOL cards are 1-based; the grid API is 0-based, hence
    the -1 adjustment.
    """
    row = int(self.getCard(name='OUTROW').value) - 1
    col = int(self.getCard(name='OUTCOL').value) - 1
    return self.getGrid().pixel2lonlat(col, row)
Gets the outlet latitude and longitude .
91
9
4,636
def setOutlet(self, col, row, outslope=None):
    """Set the outlet grid-cell cards (OUTROW/OUTCOL/OUTSLOPE).

    Row/col are stored as given; GSSHA cards are 1-based, so callers
    pass already-converted 1-based values.
    """
    # Grid lookup retained for parity with the original implementation
    # (its result is unused here; coordinates arrive pre-converted).
    self.getGrid()
    # col, row = gssha_grid.lonlat2pixel(longitude, latitude)
    self.setCard(name='OUTROW', value=str(row))
    self.setCard(name='OUTCOL', value=str(col))
    if outslope is not None:
        self.setCard(name='OUTSLOPE', value=str(outslope))
    else:
        # No slope given: derive it from the surrounding elevations.
        self.calculateOutletSlope()
Sets the outlet grid cell information in the project file .
160
12
4,637
def findOutlet(self, shapefile_path):
    """Calculate the outlet location from a watershed boundary shapefile.

    Walks the boundary polygon's exterior coordinates and picks the
    lowest in-mask elevation cell as the outlet, then stores it via
    :meth:`setOutlet` (converted back to 1-based row/col).

    :param shapefile_path: path to the watershed boundary shapefile.
    :raises IndexError: when no valid outlet point lies on the boundary.
    """
    # determine outlet from shapefile
    # by getting outlet from first point in polygon
    # make sure the boundary geometry is valid
    check_watershed_boundary_geometry(shapefile_path)
    shapefile = ogr.Open(shapefile_path)
    source_layer = shapefile.GetLayer(0)
    source_lyr_proj = source_layer.GetSpatialRef()
    osr_geographic_proj = osr.SpatialReference()
    osr_geographic_proj.ImportFromEPSG(4326)
    proj_transform = osr.CoordinateTransformation(source_lyr_proj,
                                                  osr_geographic_proj)
    boundary_feature = source_layer.GetFeature(0)
    feat_geom = boundary_feature.GetGeometryRef()
    # Reproject the boundary to geographic (EPSG:4326) coordinates.
    feat_geom.Transform(proj_transform)
    polygon = shapely_loads(feat_geom.ExportToWkb())
    # make lowest point on boundary the outlet
    mask_grid = self.getGrid()
    elevation_grid = self.getGrid(use_mask=False)
    elevation_array = elevation_grid.np_array()
    # Mask elevations outside the watershed (mask == 0).
    ma_elevation_array = np.ma.array(elevation_array,
                                     mask=mask_grid.np_array() == 0)
    min_elevation = sys.maxsize
    outlet_pt = None
    for coord in list(polygon.exterior.coords):
        try:
            col, row = mask_grid.lonlat2pixel(*coord)
        except IndexError:
            # out of bounds
            continue
        elevation_value = ma_elevation_array[row, col]
        if elevation_value is np.ma.masked:
            # search for closest value in mask to this point
            # elevation within 5 pixels in any direction
            actual_value = elevation_array[row, col]
            max_diff = sys.maxsize
            nrow = None
            ncol = None
            nval = None
            for row_ix in range(max(row - 5, 0),
                                min(row + 5, mask_grid.y_size)):
                for col_ix in range(max(col - 5, 0),
                                    min(col + 5, mask_grid.x_size)):
                    val = ma_elevation_array[row_ix, col_ix]
                    if not val is np.ma.masked:
                        # Track the in-mask cell whose elevation is
                        # closest to the out-of-mask value.
                        val_diff = abs(val - actual_value)
                        if val_diff < max_diff:
                            max_diff = val_diff
                            nval = val
                            nrow = row_ix
                            ncol = col_ix
            if None not in (nrow, ncol, nval):
                row = nrow
                col = ncol
                elevation_value = nval
        if elevation_value < min_elevation:
            min_elevation = elevation_value
            outlet_pt = (col, row)
    if outlet_pt is None:
        raise IndexError('No valid outlet points found on boundary ...')
    outcol, outrow = outlet_pt
    # Convert 0-based pixel indices back to GSSHA's 1-based cards.
    self.setOutlet(col=outcol + 1, row=outrow + 1)
Calculate outlet location
677
5
4,638
def calculateOutletSlope(self):
    """Attempt to determine the slope at the OUTLET cell and store it in
    the OUTSLOPE card.

    Averages positive downhill slopes in the 3x3 neighborhood around the
    outlet; falls back to a 0.001 floor on failure or when the mean
    would be masked/too small.
    """
    try:
        mask_grid = self.getGrid()
        elevation_grid = self.getGrid(use_mask=False)
        # OUTROW/OUTCOL cards are 1-based; arrays are 0-based.
        outrow = int(self.getCard("OUTROW").value) - 1
        outcol = int(self.getCard("OUTCOL").value) - 1
        cell_size = float(self.getCard("GRIDSIZE").value)
        # 3x3 window clipped to the grid bounds.
        # NOTE(review): x_size bounds the row range and y_size the column
        # range here — looks swapped relative to the usual x=cols,
        # y=rows convention; confirm against GDALGrid's axis semantics.
        min_row = max(0, outrow - 1)
        max_row = min(mask_grid.x_size, outrow + 2)
        min_col = max(0, outcol - 1)
        max_col = min(mask_grid.y_size, outcol + 2)
        mask_array = mask_grid.np_array()
        mask_array[outrow, outcol] = 0  # Exclude the outlet cell itself.
        mask_array = mask_array[min_row:max_row, min_col:max_col]
        mask_array = (mask_array == 0)
        elevation_array = elevation_grid.np_array()
        original_elevation = elevation_array[outrow, outcol]
        elevation_array = elevation_array[min_row:max_row,
                                          min_col:max_col]
        slope_calc_array = (elevation_array - original_elevation) / cell_size
        # NOTE: Ignoring distance to cells at angles. Assuming to small to matter
        mask_array[slope_calc_array <= 0] = True  # Drop non-uphill cells.
        slope_mask_array = np.ma.array(slope_calc_array, mask=mask_array)
        outslope = slope_mask_array.mean()
        if outslope is np.ma.masked or outslope < 0.001:
            outslope = 0.001  # Floor: everything masked or nearly flat.
    except ValueError:
        outslope = 0.001  # Missing/invalid cards: fall back to the floor.
    self.setCard("OUTSLOPE", str(outslope))
Attempt to determine the slope at the OUTLET
423
9
4,639
def timezone(self):
    """Lazily determine and cache the timezone of the GSSHA model,
    looked up from the centroid of the model grid.
    """
    if self._tz is None:
        # GET CENTROID FROM GSSHA GRID
        center_lat, center_lon = self.centerLatLon()
        # Resolve and cache the timezone for that point.
        finder = TimezoneFinder()
        zone_name = finder.timezone_at(lng=center_lon, lat=center_lat)
        self._tz = timezone(zone_name)
    return self._tz
timezone of GSSHA model
101
7
4,640
def _getBatchDirectory ( self , projectRootDirectory ) : # Set output directory to main directory as default batchDirectory = projectRootDirectory # Get the replace folder card replaceFolderCard = self . getCard ( 'REPLACE_FOLDER' ) if replaceFolderCard : replaceDir = replaceFolderCard . value . strip ( '"' ) batchDirectory = os . path . join ( batchDirectory , replaceDir ) # Create directory if it doesn't exist if not os . path . isdir ( batchDirectory ) : os . mkdir ( batchDirectory ) log . info ( 'Creating directory for batch output: {0}' . format ( batchDirectory ) ) return batchDirectory
Check the project file for the REPLACE_FOLDER card . If it exists , append its value to create the batch directory path . This is the directory output is written to when run in batch mode .
142
42
4,641
def _readXput(self, fileCards, directory, session, spatial=False,
              spatialReferenceID=4236, replaceParamFile=None):
    """GSSHAPY project read-files-from-file method.

    Iterates the project cards and reads every file whose card appears
    in *fileCards* with a usable value.

    NOTE(review): the default spatialReferenceID of 4236 looks like a
    typo for EPSG 4326 (WGS84) — confirm before relying on the default.
    """
    ## NOTE: This function is dependent on the project file being read first
    # Read Input/Output Files
    for card in self.projectCards:
        if (card.name in fileCards) and self._noneOrNumValue(card.value) \
                and fileCards[card.name]:
            fileIO = fileCards[card.name]
            filename = card.value.strip('"')
            # Invoke read method on each file
            self._invokeRead(fileIO=fileIO, directory=directory,
                             filename=filename, session=session,
                             spatial=spatial,
                             spatialReferenceID=spatialReferenceID,
                             replaceParamFile=replaceParamFile)
GSSHAPY Project Read Files from File Method
170
11
4,642
def _readXputMaps ( self , mapCards , directory , session , spatial = False , spatialReferenceID = 4236 , replaceParamFile = None ) : if self . mapType in self . MAP_TYPES_SUPPORTED : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) # Invoke read method on each map self . _invokeRead ( fileIO = RasterMapFile , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) else : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) fileExtension = filename . split ( '.' ) [ 1 ] if fileExtension in self . ALWAYS_READ_AND_WRITE_MAPS : # Invoke read method on each map self . _invokeRead ( fileIO = RasterMapFile , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) log . warning ( 'Could not read map files. ' 'MAP_TYPE {0} not supported.' . format ( self . mapType ) )
GSSHA Project Read Map Files from File Method
315
10
4,643
def _readWMSDatasets ( self , datasetCards , directory , session , spatial = False , spatialReferenceID = 4236 ) : if self . mapType in self . MAP_TYPES_SUPPORTED : # Get Mask Map dependency maskMap = session . query ( RasterMapFile ) . filter ( RasterMapFile . projectFile == self ) . filter ( RasterMapFile . fileExtension == 'msk' ) . one ( ) for card in self . projectCards : if ( card . name in datasetCards ) and self . _noneOrNumValue ( card . value ) : # Get filename from project file filename = card . value . strip ( '"' ) path = os . path . join ( directory , filename ) if os . path . isfile ( path ) : wmsDatasetFile = WMSDatasetFile ( ) wmsDatasetFile . projectFile = self wmsDatasetFile . read ( directory = directory , filename = filename , session = session , maskMap = maskMap , spatial = spatial , spatialReferenceID = spatialReferenceID ) else : self . _readBatchOutputForFile ( directory , WMSDatasetFile , filename , session , spatial , spatialReferenceID , maskMap = maskMap )
Method to handle the special case of WMS Dataset Files . WMS Dataset Files cannot be read in independently as other types of file can . They rely on the Mask Map file for some parameters .
275
43
4,644
def _readBatchOutputForFile ( self , directory , fileIO , filename , session , spatial , spatialReferenceID , replaceParamFile = None , maskMap = None ) : # Get contents of directory directoryList = os . listdir ( directory ) # Compile a list of files with that include the filename in them batchFiles = [ ] for thing in directoryList : if filename in thing : batchFiles . append ( thing ) numFilesRead = 0 for batchFile in batchFiles : instance = fileIO ( ) instance . projectFile = self if isinstance ( instance , WMSDatasetFile ) : instance . read ( directory = directory , filename = batchFile , session = session , maskMap = maskMap , spatial = spatial , spatialReferenceID = spatialReferenceID ) else : instance . read ( directory , batchFile , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Increment runCounter for next file numFilesRead += 1 # Issue warnings if '[' in filename or ']' in filename : log . info ( 'A file cannot be read, because the path to the ' 'file in the project file has been replaced with ' 'replacement variable {0}.' . format ( filename ) ) elif numFilesRead == 0 : log . warning ( '{0} listed in project file, but no such ' 'file exists.' . format ( filename ) ) else : log . info ( 'Batch mode output detected. {0} files read ' 'for file {1}' . format ( numFilesRead , filename ) )
When batch mode is run in GSSHA the files of the same type are prepended with an integer to avoid filename conflicts . This will attempt to read files in this format and throw warnings if the files aren't found .
336
45
4,645
def _invokeRead ( self , fileIO , directory , filename , session , spatial = False , spatialReferenceID = 4236 , replaceParamFile = None , * * kwargs ) : path = os . path . join ( directory , filename ) if os . path . isfile ( path ) : instance = fileIO ( ) instance . projectFile = self instance . read ( directory , filename , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile , * * kwargs ) return instance else : self . _readBatchOutputForFile ( directory , fileIO , filename , session , spatial , spatialReferenceID , replaceParamFile )
Invoke File Read Method on Other Files
144
8
4,646
def _writeXput ( self , session , directory , fileCards , name = None , replaceParamFile = None ) : for card in self . projectCards : if ( card . name in fileCards ) and self . _noneOrNumValue ( card . value ) and fileCards [ card . name ] : fileIO = fileCards [ card . name ] filename = card . value . strip ( '"' ) # Check for replacement variables if '[' in filename or ']' in filename : log . info ( 'The file for project card {0} cannot be ' 'written, because the path has been replaced ' 'with replacement variable {1}.' . format ( card . name , filename ) ) return # Determine new filename filename = self . _replaceNewFilename ( filename = filename , name = name ) # Invoke write method on each file self . _invokeWrite ( fileIO = fileIO , session = session , directory = directory , filename = filename , replaceParamFile = replaceParamFile )
GSSHA Project Write Files to File Method
214
9
4,647
def _writeXputMaps ( self , session , directory , mapCards , name = None , replaceParamFile = None ) : if self . mapType in self . MAP_TYPES_SUPPORTED : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) # Determine new filename filename = self . _replaceNewFilename ( filename , name ) # Write map file self . _invokeWrite ( fileIO = RasterMapFile , session = session , directory = directory , filename = filename , replaceParamFile = replaceParamFile ) else : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) fileExtension = filename . split ( '.' ) [ 1 ] if fileExtension in self . ALWAYS_READ_AND_WRITE_MAPS : # Determine new filename filename = self . _replaceNewFilename ( filename , name ) # Write map file self . _invokeWrite ( fileIO = RasterMapFile , session = session , directory = directory , filename = filename , replaceParamFile = replaceParamFile ) log . error ( 'Could not write map files. MAP_TYPE {0} ' 'not supported.' . format ( self . mapType ) )
GSSHAPY Project Write Map Files to File Method
312
12
4,648
def _writeWMSDatasets ( self , session , directory , wmsDatasetCards , name = None ) : if self . mapType in self . MAP_TYPES_SUPPORTED : for card in self . projectCards : if ( card . name in wmsDatasetCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) # Determine new filename filename = self . _replaceNewFilename ( filename , name ) # Handle case where fileIO interfaces with multiple files # Retrieve File using FileIO and file extension extension = filename . split ( '.' ) [ 1 ] # Get mask map file maskMap = session . query ( RasterMapFile ) . filter ( RasterMapFile . projectFile == self ) . filter ( RasterMapFile . fileExtension == 'msk' ) . one ( ) # Default wms dataset wmsDataset = None try : wmsDataset = session . query ( WMSDatasetFile ) . filter ( WMSDatasetFile . projectFile == self ) . filter ( WMSDatasetFile . fileExtension == extension ) . one ( ) except NoResultFound : # Handle case when there is no file in database but # the card is listed in the project file log . warning ( '{0} listed as card in project file, ' 'but the file is not found in the database.' . format ( filename ) ) except MultipleResultsFound : # Write all instances self . _invokeWriteForMultipleOfType ( directory , extension , WMSDatasetFile , filename , session , maskMap = maskMap ) return # Initiate Write Method on File if wmsDataset is not None and maskMap is not None : wmsDataset . write ( session = session , directory = directory , name = filename , maskMap = maskMap ) else : log . error ( 'Could not write WMS Dataset files. ' 'MAP_TYPE {0} not supported.' . format ( self . mapType ) )
GSSHAPY Project Write WMS Datasets to File Method
446
15
4,649
def _writeReplacementFiles ( self , session , directory , name ) : if self . replaceParamFile : self . replaceParamFile . write ( session = session , directory = directory , name = name ) if self . replaceValFile : self . replaceValFile . write ( session = session , directory = directory , name = name )
Write the replacement files
70
4
4,650
def _invokeWrite ( self , fileIO , session , directory , filename , replaceParamFile ) : # Default value for instance instance = None try : # Handle case where fileIO interfaces with single file # Retrieve File using FileIO instance = session . query ( fileIO ) . filter ( fileIO . projectFile == self ) . one ( ) except : # Handle case where fileIO interfaces with multiple files # Retrieve File using FileIO and file extension extension = filename . split ( '.' ) [ 1 ] try : instance = session . query ( fileIO ) . filter ( fileIO . projectFile == self ) . filter ( fileIO . fileExtension == extension ) . one ( ) except NoResultFound : # Handle case when there is no file in database but the # card is listed in the project file log . warning ( '{0} listed as card in project file, but ' 'the file is not found in the database.' . format ( filename ) ) except MultipleResultsFound : self . _invokeWriteForMultipleOfType ( directory , extension , fileIO , filename , session , replaceParamFile = replaceParamFile ) return # Initiate Write Method on File if instance is not None : instance . write ( session = session , directory = directory , name = filename , replaceParamFile = replaceParamFile )
Invoke File Write Method on Other Files
274
8
4,651
def write ( self , originalPrefix , newPrefix = None ) : # Determine number of spaces between card and value for nice alignment numSpaces = max ( 2 , 25 - len ( self . name ) ) # Handle special case of booleans if self . value is None : line = '%s\n' % self . name else : if self . name == 'WMS' : line = '%s %s\n' % ( self . name , self . value ) elif newPrefix is None : line = '%s%s%s\n' % ( self . name , ' ' * numSpaces , self . value ) elif originalPrefix in self . value : line = '%s%s%s\n' % ( self . name , ' ' * numSpaces , self . value . replace ( originalPrefix , newPrefix ) ) else : line = '%s%s%s\n' % ( self . name , ' ' * numSpaces , self . value ) return line
Write project card to string .
225
6
4,652
def elapsed ( self ) : if not self . started or self . _start_time is None : return 0.0 return self . _timing_data [ - 1 ] [ 0 ] - self . _start_time
Returns the number of seconds it has been since the start until the latest entry .
47
16
4,653
def rate_unstable ( self ) : if not self . started or self . stalled : return 0.0 x1 , y1 = self . _timing_data [ - 2 ] x2 , y2 = self . _timing_data [ - 1 ] return ( y2 - y1 ) / ( x2 - x1 )
Returns an unstable rate based on the last two entries in the timing data . Less intensive to compute .
73
20
4,654
def rate_overall ( self ) : elapsed = self . elapsed return self . rate if not elapsed else self . numerator / self . elapsed
Returns the overall average rate based on the start time .
30
11
4,655
def _calculate ( self ) : # Calculate means and standard deviations. mean_x = sum ( i [ 0 ] for i in self . _timing_data ) / len ( self . _timing_data ) mean_y = sum ( i [ 1 ] for i in self . _timing_data ) / len ( self . _timing_data ) std_x = sqrt ( sum ( pow ( i [ 0 ] - mean_x , 2 ) for i in self . _timing_data ) / ( len ( self . _timing_data ) - 1 ) ) std_y = sqrt ( sum ( pow ( i [ 1 ] - mean_y , 2 ) for i in self . _timing_data ) / ( len ( self . _timing_data ) - 1 ) ) # Calculate coefficient. sum_xy , sum_sq_v_x , sum_sq_v_y = 0 , 0 , 0 for x , y in self . _timing_data : x -= mean_x y -= mean_y sum_xy += x * y sum_sq_v_x += pow ( x , 2 ) sum_sq_v_y += pow ( y , 2 ) pearson_r = sum_xy / sqrt ( sum_sq_v_x * sum_sq_v_y ) # Calculate regression line. y = mx + b where m is the slope and b is the y-intercept. m = self . rate = pearson_r * ( std_y / std_x ) if self . undefined : return y = self . denominator b = mean_y - m * mean_x x = ( y - b ) / m # Calculate fitted line (transformed/shifted regression line horizontally). fitted_b = self . _timing_data [ - 1 ] [ 1 ] - ( m * self . _timing_data [ - 1 ] [ 0 ] ) fitted_x = ( y - fitted_b ) / m adjusted_x = ( ( fitted_x - x ) * ( self . numerator / self . denominator ) ) + x self . eta_epoch = adjusted_x
Perform the ETA and rate calculation .
476
9
4,656
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Open file and parse into HmetRecords with open ( path , 'r' ) as hmetFile : for line in hmetFile : sline = line . strip ( ) . split ( ) try : # Extract data time from record dateTime = datetime ( int ( sline [ 0 ] ) , int ( sline [ 1 ] ) , int ( sline [ 2 ] ) , int ( sline [ 3 ] ) ) # Intitialize GSSHAPY HmetRecord object hmetRecord = HmetRecord ( hmetDateTime = dateTime , barometricPress = sline [ 4 ] , relHumidity = sline [ 5 ] , totalSkyCover = sline [ 6 ] , windSpeed = sline [ 7 ] , dryBulbTemp = sline [ 8 ] , directRad = sline [ 9 ] , globalRad = sline [ 10 ] ) # Associate HmetRecord with HmetFile hmetRecord . hmetFile = self except : pass
Read HMET WES from File Method
252
8
4,657
def _write ( self , session , openFile , replaceParamFile ) : ## TODO: Ensure Other HMET Formats are supported hmetRecords = self . hmetRecords for record in hmetRecords : openFile . write ( '%s\t%s\t%s\t%s\t%.3f\t%s\t%s\t%s\t%s\t%.2f\t%.2f\n' % ( record . hmetDateTime . year , record . hmetDateTime . month , record . hmetDateTime . day , record . hmetDateTime . hour , record . barometricPress , record . relHumidity , record . totalSkyCover , record . windSpeed , record . dryBulbTemp , record . directRad , record . globalRad ) )
Write HMET WES to File Method
180
8
4,658
def _read ( self , directory , filename , session , path , name , extension , spatial = None , spatialReferenceID = None , replaceParamFile = None ) : yml_events = [ ] with open ( path ) as fo : yml_events = yaml . load ( fo ) for yml_event in yml_events : if os . path . exists ( os . path . join ( directory , yml_event . subfolder ) ) : orm_event = yml_event . as_orm ( ) if not self . _similar_event_exists ( orm_event . subfolder ) : session . add ( orm_event ) self . events . append ( orm_event ) session . commit ( )
ProjectFileEvent Read from File Method
159
7
4,659
def as_yml ( self ) : return YmlFileEvent ( name = str ( self . name ) , subfolder = str ( self . subfolder ) )
Return yml compatible version of self
35
7
4,660
def prepare_request ( node ) : if node . resource . method not in AVAILABLE_METHODS : raise UnsupportedHTTPMethodError ( node . resource . method ) def request ( data = None , json = None , * * kwargs ) : """ Make request to node's API route with the given keyword arguments """ # validate given query parameters for key , value in kwargs . items ( ) : param = next ( ( p for p in node . resource . query_params if p . name == key ) , None ) if not param : raise UnsupportedQueryParameter ( node . resource . path , key ) if not match_type ( value , param . type ) : raise TypeError ( "Resource Query Parameter has type '{0}' but expected type '{1}'" . format ( value . __class__ . __name__ , param . type ) ) response = requests . request ( node . resource . method , node . resource . absolute_uri , params = kwargs , data = data , json = json ) return response return request
Prepare request to node's API route
227
8
4,661
def define_plot_data ( data , x_name , * y_names ) : it = [ ] for k in y_names : it . append ( { 'x' : data [ x_name ] , 'y' : data [ k ] , 'name' : k } ) return it
Defines the data to be plotted .
64
8
4,662
def plot_lines ( it ) : data = [ go . Scatter ( mode = 'lines' , * * d ) for d in it ] return py . iplot ( data , filename = 'scatter-mode' )
Plotting lines .
48
4
4,663
def _ssh_channel_read ( ssh_channel_int , count , is_stderr ) : buffer_ = create_string_buffer ( count ) while 1 : received_bytes = c_ssh_channel_read ( ssh_channel_int , cast ( buffer_ , c_void_p ) , c_uint32 ( count ) , c_int ( int ( is_stderr ) ) ) if received_bytes == SSH_ERROR : ssh_session_int = _ssh_channel_get_session ( ssh_channel_int ) error = ssh_get_error ( ssh_session_int ) raise SshError ( "Channel read failed: %s" % ( error ) ) # BUG: We're not using the nonblocking variant, but this can still # return SSH_AGAIN due to that call's broken dependencies. # TODO: This call might return SSH_AGAIN, even though we should always be # blocking. Reported as bug #115. elif received_bytes == SSH_AGAIN : continue else : break # TODO: Where is the timeout configured for the read? return buffer_ . raw [ 0 : received_bytes ]
Do a read on a channel .
250
7
4,664
def eventChunk ( key , lines ) : ## NOTE: RADAR file format not supported currently. ## TODO: Add Support for RADAR file format type values # Contants KEYWORDS = ( 'EVENT' , 'NRPDS' , 'NRGAG' , 'COORD' , 'GAGES' , 'ACCUM' , 'RATES' , 'RADAR' ) NUM_CARDS = ( 'NRPDS' , 'NRGAG' ) VALUE_CARDS = ( 'GAGES' , 'ACCUM' , 'RATES' , 'RADAR' ) # Define result object result = { 'description' : None , 'nrgag' : None , 'nrpds' : None , 'coords' : [ ] , 'valLines' : [ ] } chunks = pt . chunk ( KEYWORDS , lines ) # Parse chunks associated with each key for card , chunkList in iteritems ( chunks ) : # Parse each chunk in the chunk list for chunk in chunkList : schunk = chunk [ 0 ] . strip ( ) . split ( ) # Cases if card == 'EVENT' : # EVENT handler schunk = pt . splitLine ( chunk [ 0 ] ) result [ 'description' ] = schunk [ 1 ] elif card in NUM_CARDS : # Num cards handler result [ card . lower ( ) ] = schunk [ 1 ] elif card == 'COORD' : # COORD handler schunk = pt . splitLine ( chunk [ 0 ] ) try : # Extract the event description desc = schunk [ 3 ] except : # Handle case where the event description is blank desc = "" coord = { 'x' : schunk [ 1 ] , 'y' : schunk [ 2 ] , 'description' : desc } result [ 'coords' ] . append ( coord ) elif card in VALUE_CARDS : # Value cards handler # Extract DateTime dateTime = datetime ( year = int ( schunk [ 1 ] ) , month = int ( schunk [ 2 ] ) , day = int ( schunk [ 3 ] ) , hour = int ( schunk [ 4 ] ) , minute = int ( schunk [ 5 ] ) ) # Compile values into a list values = [ ] for index in range ( 6 , len ( schunk ) ) : values . append ( schunk [ index ] ) valueLine = { 'type' : schunk [ 0 ] , 'dateTime' : dateTime , 'values' : values } result [ 'valLines' ] . append ( valueLine ) return result
Parse EVENT chunks
558
4
4,665
def request ( self , method , path , params = None , headers = None , body = None ) : if not headers : headers = { } if not params : params = { } headers [ "Accept" ] = "application/json" headers [ "Accept-Version" ] = "^1.15.0" if self . auth_token : headers [ "Authorization" ] = "Bearer {0}" . format ( self . auth_token ) path = self . url + path params = self . flatten_params ( params ) response = requests . request ( method , path , params = params , headers = headers , json = body ) result = response . text try : result = response . json ( ) except Exception : pass if response . status_code >= 400 : raise LosantError ( response . status_code , result ) return result
Base method for making a Losant API request
179
9
4,666
def flatten_params ( self , data , base_key = None ) : result = { } if data is None : return result map_data = None if not isinstance ( data , collections . Mapping ) : map_data = [ ] for idx , val in enumerate ( data ) : map_data . append ( [ str ( idx ) , val ] ) else : map_data = list ( data . items ( ) ) for key , value in map_data : if not base_key is None : key = base_key + "[" + key + "]" if isinstance ( value , basestring ) or not hasattr ( value , "__iter__" ) : result [ key ] = value else : result . update ( self . flatten_params ( value , key ) ) return result
Flatten out nested arrays and dicts in query params into correct format
174
14
4,667
def read_excel ( input_fpath ) : return { k : v . values for k , v in pd . read_excel ( input_fpath ) . items ( ) }
Reads the excel file .
42
6
4,668
def save_outputs ( outputs , output_fpath ) : df = pd . DataFrame ( outputs ) with pd . ExcelWriter ( output_fpath ) as writer : df . to_excel ( writer )
Save model outputs in an Excel file .
48
8
4,669
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Dictionary of keywords/cards and parse function names KEYWORDS = ( 'EVENT' , ) # Parse file into chunks associated with keywords/cards with open ( path , 'r' ) as f : chunks = pt . chunk ( KEYWORDS , f ) # Parse chunks associated with each key for key , chunkList in iteritems ( chunks ) : # Parse each chunk in the chunk list for chunk in chunkList : result = gak . eventChunk ( key , chunk ) self . _createGsshaPyObjects ( result ) # Add this PrecipFile to the database session session . add ( self )
Precipitation Read from File Method
174
8
4,670
def _write ( self , session , openFile , replaceParamFile ) : # Retrieve the events associated with this PrecipFile events = self . precipEvents # Write each event to file for event in events : openFile . write ( 'EVENT "%s"\nNRGAG %s\nNRPDS %s\n' % ( event . description , event . nrGag , event . nrPds ) ) if event . nrGag > 0 : values = event . values valList = [ ] # Convert PrecipValue objects into a list of dictionaries, valList, # so that it is compatible with the pivot function. for value in values : valList . append ( { 'ValueType' : value . valueType , 'DateTime' : value . dateTime , 'Gage' : value . gage . id , 'Value' : value . value } ) # Pivot using the function found at: # code.activestate.com/recipes/334695 pivotedValues = pivot . pivot ( valList , ( 'DateTime' , 'ValueType' ) , ( 'Gage' , ) , 'Value' ) ## TODO: Create custom pivot function that can work with sqlalchemy ## objects explicitly without the costly conversion. # Create an empty set for obtaining a list of unique gages gages = session . query ( PrecipGage ) . filter ( PrecipGage . event == event ) . order_by ( PrecipGage . id ) . all ( ) for gage in gages : openFile . write ( 'COORD %s %s "%s"\n' % ( gage . x , gage . y , gage . description ) ) # Write the value rows out to file for row in pivotedValues : # Extract the PrecipValues valString = '' # Retreive a list of sorted keys. This assumes the values are # read into the database in order keys = sorted ( [ key for key in row if key != 'DateTime' and key != 'ValueType' ] ) # String all of the values together into valString for key in keys : if key != 'DateTime' and key != 'ValueType' : valString = '%s %.3f' % ( valString , row [ key ] ) # Write value line to file with appropriate formatting openFile . write ( '%s %.4d %.2d %.2d %.2d %.2d%s\n' % ( row [ 'ValueType' ] , row [ 'DateTime' ] . year , row [ 'DateTime' ] . month , row [ 'DateTime' ] . day , row [ 'DateTime' ] . hour , row [ 'DateTime' ] . minute , valString ) )
Precipitation File Write to File Method
598
9
4,671
def _createGsshaPyObjects ( self , eventChunk ) : ## TODO: Add Support for RADAR file format type values # Create GSSHAPY PrecipEvent event = PrecipEvent ( description = eventChunk [ 'description' ] , nrGag = eventChunk [ 'nrgag' ] , nrPds = eventChunk [ 'nrpds' ] ) # Associate PrecipEvent with PrecipFile event . precipFile = self gages = [ ] for coord in eventChunk [ 'coords' ] : # Create GSSHAPY PrecipGage object gage = PrecipGage ( description = coord [ 'description' ] , x = coord [ 'x' ] , y = coord [ 'y' ] ) # Associate PrecipGage with PrecipEvent gage . event = event # Append to gages list for association with PrecipValues gages . append ( gage ) for valLine in eventChunk [ 'valLines' ] : for index , value in enumerate ( valLine [ 'values' ] ) : # Create GSSHAPY PrecipValue object val = PrecipValue ( valueType = valLine [ 'type' ] , dateTime = valLine [ 'dateTime' ] , value = value ) # Associate PrecipValue with PrecipEvent and PrecipGage val . event = event val . gage = gages [ index ]
Create GSSHAPY PrecipEvent PrecipValue and PrecipGage Objects Method
311
19
4,672
def lookupSpatialReferenceID ( cls , directory , filename ) : path = os . path . join ( directory , filename ) with open ( path , 'r' ) as f : srid = lookupSpatialReferenceID ( f . read ( ) ) return srid
Look up spatial reference system using the projection file .
57
10
4,673
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Open file and parse into a data structure with io_open ( path , 'r' ) as f : self . projection = f . read ( )
Projection File Read from File Method
73
7
4,674
def _write ( self , session , openFile , replaceParamFile ) : # Write lines openFile . write ( text ( self . projection ) )
Projection File Write to File Method
31
7
4,675
def numerator ( self , value ) : # If ETA is every iteration, don't do anything fancy. if self . eta_every <= 1 : self . _eta . numerator = value self . _eta_string = self . _generate_eta ( self . _eta . eta_seconds ) return # If ETA is not every iteration, unstable rate is used. If this bar is undefined, no point in calculating ever. if self . _eta . undefined : self . _eta . set_numerator ( value , calculate = False ) return # Calculate if this iteration is the right one. if self . _eta_count >= self . eta_every : self . _eta_count = 1 self . _eta . numerator = value self . _eta_string = self . _generate_eta ( self . _eta . eta_seconds ) return self . _eta_count += 1 self . _eta . set_numerator ( value , calculate = False )
Sets a new numerator and generates the ETA . Must be greater than or equal to previous numerator .
214
23
4,676
def rate ( self ) : return float ( self . _eta . rate_unstable if self . eta_every > 1 else self . _eta . rate )
Returns the rate of the progress as a float . Selects the unstable rate if eta_every > 1 for performance .
35
25
4,677
def generateFromRaster ( self , elevation_raster , shapefile_path = None , out_elevation_grid = None , resample_method = gdalconst . GRA_Average , load_raster_to_db = True ) : if not self . projectFile : raise ValueError ( "Must be connected to project file ..." ) # make sure paths are absolute as the working directory changes elevation_raster = os . path . abspath ( elevation_raster ) shapefile_path = os . path . abspath ( shapefile_path ) # must match elevation mask grid mask_grid = self . projectFile . getGrid ( ) if out_elevation_grid is None : out_elevation_grid = '{0}.{1}' . format ( self . projectFile . name , self . fileExtension ) elevation_grid = resample_grid ( elevation_raster , mask_grid , resample_method = resample_method , as_gdal_grid = True ) with tmp_chdir ( self . projectFile . project_directory ) : elevation_grid . to_grass_ascii ( out_elevation_grid , print_nodata = False ) # read raster into object if load_raster_to_db : self . _load_raster_text ( out_elevation_grid ) self . filename = out_elevation_grid self . projectFile . setCard ( "ELEVATION" , out_elevation_grid , add_quotes = True ) # find outlet and add slope self . projectFile . findOutlet ( shapefile_path )
Generates an elevation grid for the GSSHA simulation from an elevation raster
359
16
4,678
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Dictionary of keywords/cards and parse function names KEYWORDS = { 'CONNECT' : spc . connectChunk , 'SJUNC' : spc . sjuncChunk , 'SLINK' : spc . slinkChunk } sjuncs = [ ] slinks = [ ] connections = [ ] # Parse file into chunks associated with keywords/cards with open ( path , 'r' ) as f : chunks = pt . chunk ( KEYWORDS , f ) # Parse chunks associated with each key for key , chunkList in iteritems ( chunks ) : # Parse each chunk in the chunk list for chunk in chunkList : # Call chunk specific parsers for each chunk result = KEYWORDS [ key ] ( key , chunk ) # Cases if key == 'CONNECT' : connections . append ( result ) elif key == 'SJUNC' : sjuncs . append ( result ) elif key == 'SLINK' : slinks . append ( result ) # Create GSSHAPY objects self . _createConnection ( connections ) self . _createSjunc ( sjuncs ) self . _createSlink ( slinks )
Storm Pipe Network File Read from File Method
293
8
4,679
def _write ( self , session , openFile , replaceParamFile ) : # Retrieve Connection objects and write to file connections = self . connections self . _writeConnections ( connections = connections , fileObject = openFile ) # Retrieve SuperJunction objects and write to file sjuncs = self . superJunctions self . _writeSuperJunctions ( superJunctions = sjuncs , fileObject = openFile ) # Retrieve SuperLink objects and write to file slinks = self . superLinks self . _writeSuperLinks ( superLinks = slinks , fileObject = openFile )
Storm Pipe Network File Write to File Method
125
8
4,680
def _createConnection ( self , connections ) : for c in connections : # Create GSSHAPY Connection object connection = Connection ( slinkNumber = c [ 'slinkNumber' ] , upSjuncNumber = c [ 'upSjunc' ] , downSjuncNumber = c [ 'downSjunc' ] ) # Associate Connection with StormPipeNetworkFile connection . stormPipeNetworkFile = self
Create GSSHAPY Connection Objects Method
91
9
4,681
def _createSlink ( self , slinks ) : for slink in slinks : # Create GSSHAPY SuperLink object superLink = SuperLink ( slinkNumber = slink [ 'slinkNumber' ] , numPipes = slink [ 'numPipes' ] ) # Associate SuperLink with StormPipeNetworkFile superLink . stormPipeNetworkFile = self for node in slink [ 'nodes' ] : # Create GSSHAPY SuperNode objects superNode = SuperNode ( nodeNumber = node [ 'nodeNumber' ] , groundSurfaceElev = node [ 'groundSurfaceElev' ] , invertElev = node [ 'invertElev' ] , manholeSA = node [ 'manholeSA' ] , nodeInletCode = node [ 'inletCode' ] , cellI = node [ 'cellI' ] , cellJ = node [ 'cellJ' ] , weirSideLength = node [ 'weirSideLength' ] , orificeDiameter = node [ 'orificeDiameter' ] ) # Associate SuperNode with SuperLink superNode . superLink = superLink for p in slink [ 'pipes' ] : # Create GSSHAPY Pipe objects pipe = Pipe ( pipeNumber = p [ 'pipeNumber' ] , xSecType = p [ 'xSecType' ] , diameterOrHeight = p [ 'diameterOrHeight' ] , width = p [ 'width' ] , slope = p [ 'slope' ] , roughness = p [ 'roughness' ] , length = p [ 'length' ] , conductance = p [ 'conductance' ] , drainSpacing = p [ 'drainSpacing' ] ) # Associate Pipe with SuperLink pipe . superLink = superLink
Create GSSHAPY SuperLink Pipe and SuperNode Objects Method
388
14
4,682
def _createSjunc ( self , sjuncs ) : for sjunc in sjuncs : # Create GSSHAPY SuperJunction object superJunction = SuperJunction ( sjuncNumber = sjunc [ 'sjuncNumber' ] , groundSurfaceElev = sjunc [ 'groundSurfaceElev' ] , invertElev = sjunc [ 'invertElev' ] , manholeSA = sjunc [ 'manholeSA' ] , inletCode = sjunc [ 'inletCode' ] , linkOrCellI = sjunc [ 'linkOrCellI' ] , nodeOrCellJ = sjunc [ 'nodeOrCellJ' ] , weirSideLength = sjunc [ 'weirSideLength' ] , orificeDiameter = sjunc [ 'orificeDiameter' ] ) # Associate SuperJunction with StormPipeNetworkFile superJunction . stormPipeNetworkFile = self
Create GSSHAPY SuperJunction Objects Method
215
11
4,683
def _writeConnections ( self , connections , fileObject ) : for connection in connections : fileObject . write ( 'CONNECT %s %s %s\n' % ( connection . slinkNumber , connection . upSjuncNumber , connection . downSjuncNumber ) )
Write Connections to File Method
61
6
4,684
def _writeSuperJunctions ( self , superJunctions , fileObject ) : for sjunc in superJunctions : fileObject . write ( 'SJUNC %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % ( sjunc . sjuncNumber , sjunc . groundSurfaceElev , sjunc . invertElev , sjunc . manholeSA , sjunc . inletCode , sjunc . linkOrCellI , sjunc . nodeOrCellJ , sjunc . weirSideLength , sjunc . orificeDiameter ) )
Write SuperJunctions to File Method
150
7
4,685
def _writeSuperLinks ( self , superLinks , fileObject ) : for slink in superLinks : fileObject . write ( 'SLINK %s %s\n' % ( slink . slinkNumber , slink . numPipes ) ) for node in slink . superNodes : fileObject . write ( 'NODE %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % ( node . nodeNumber , node . groundSurfaceElev , node . invertElev , node . manholeSA , node . nodeInletCode , node . cellI , node . cellJ , node . weirSideLength , node . orificeDiameter ) ) for pipe in slink . pipes : fileObject . write ( 'PIPE %s %s %.6f %.6f %.6f %.6f %.2f %.6f %.6f\n' % ( pipe . pipeNumber , pipe . xSecType , pipe . diameterOrHeight , pipe . width , pipe . slope , pipe . roughness , pipe . length , pipe . conductance , pipe . drainSpacing ) )
Write SuperLinks to File Method
263
6
4,686
def ziegler_nichols(self, ku, tu, control_type='pid'):
    """Tune the controller gains with the Ziegler-Nichols method.

    ku -- ultimate gain; tu -- oscillation period at the ultimate gain.
    control_type selects the tuning rule (case-insensitive); an unknown
    rule raises KeyError. Sets self.kp, self.ki and self.kd.
    """
    # Per rule: (kp factor, ki numerator, kd numerator, kd denominator)
    # so that kp = f*ku, ki = n_i*kp/tu, kd = n_d*kp*tu/d_d.
    RULES = {
        'p':              (.5,  0,   0, 1),
        'pi':             (.45, 1.2, 0, 1),
        'pd':             (.8,  0,   1, 8),
        'pid':            (.6,  2,   1, 8),
        'pessen':         (.7,  2.5, 3, 20),
        'some_overshoot': (.33, 2,   1, 3),
        'no_overshoot':   (.2,  2,   1, 3),
    }
    kp_f, ki_f, kd_n, kd_d = RULES[control_type.lower()]
    kp = kp_f * ku
    self.kp = kp
    # Zero factors yield integer 0 to match the tabulated rules exactly
    self.ki = ki_f * kp / tu if ki_f else 0
    self.kd = kd_n * kp * tu / kd_d if kd_n else 0
ku = ultimate gain; tu = period of oscillation at ultimate gain
315
13
4,687
def datasetHeaderChunk(key, lines):
    """Parse the header section of a dataset file into a result dict.

    Splits *lines* on the recognized header keywords and extracts each
    keyword's value. The *key* parameter is accepted for interface
    compatibility but is not used.
    """
    KEYWORDS = ('DATASET', 'OBJTYPE', 'VECTYPE', 'BEGSCL', 'BEGVEC',
                'OBJID', 'ND', 'NC', 'NAME')
    TYPE_KEYS = ('BEGSCL', 'BEGVEC')

    result = {'type': None,
              'numberData': None,
              'numberCells': None,
              'name': None,
              'objectID': None,
              'objectType': None,
              'vectorType': None}

    chunks = pt.chunk(KEYWORDS, lines)

    # `card` is the header keyword for each chunk group (renamed so it no
    # longer shadows the unused `key` parameter).
    for card, chunkList in iteritems(chunks):
        for c in chunkList:
            tokens = pt.splitLine(c[0])
            if card == 'ND':
                result['numberData'] = int(tokens[1])
            elif card == 'NC':
                result['numberCells'] = int(tokens[1])
            elif card == 'NAME':
                result['name'] = tokens[1]
            elif card == 'OBJID':
                result['objectID'] = int(tokens[1])
            elif card == 'OBJTYPE':
                result['objectType'] = tokens[1]
            elif card == 'VECTYPE':
                result['vectorType'] = tokens[1]
            elif card in TYPE_KEYS:
                result['type'] = tokens[0]
    return result
Process the dataset header
346
4
4,688
def datasetScalarTimeStepChunk(lines, numberColumns, numberCells):
    """Process one time-step chunk of a scalar dataset.

    Consumes the time-step header line from *lines*, skips the status
    indicator lines when present, and assembles both a nested-list
    string of cell values and the raw raster text.
    """
    END_DATASET_TAG = 'ENDDS'

    result = {'iStatus': None,
              'timestamp': None,
              'cellArray': None,
              'rasterText': None}

    # First line of the chunk is the time-step header: TS <istat> <time>
    header = pt.splitLine(lines.pop(0))
    iStatus = int(header[1])

    # When status cells are present (istat != 0) the first `numberCells`
    # lines are status indicators and are skipped; otherwise every line
    # is a value.
    firstValueIndex = numberCells if iStatus != 0 else 0

    # Strip a trailing end-of-dataset tag if present
    if END_DATASET_TAG in lines[-1]:
        lines.pop(-1)

    # Build the '[[a, b], [c, d]]' array string and, in the same pass,
    # accumulate the raw raster text (lines kept verbatim).
    valueString = '[['
    rasterText = ''
    column = 1
    lastIndex = len(lines) - 1
    for idx in range(firstValueIndex, len(lines)):
        cell = lines[idx].strip()
        if idx == lastIndex:
            valueString += cell + ']]'          # close the final row
        elif column % numberColumns == 0:
            valueString += cell + '], ['        # end of a row
        else:
            valueString += cell + ', '          # mid-row separator
        column += 1
        rasterText += lines[idx]

    result['cellArray'] = valueString
    result['rasterText'] = rasterText
    result['iStatus'] = iStatus
    result['timestamp'] = float(header[2])
    return result
Process the time step chunks for scalar datasets
394
9
4,689
def save_dispatcher(dsp, path):
    """Serialize a Dispatcher object to *path* in Python pickle format
    using dill."""
    import dill
    with open(path, 'wb') as fh:
        dill.dump(dsp, fh)
Write Dispatcher object in Python pickle format .
38
11
4,690
def save_default_values(dsp, path):
    """Serialize only the Dispatcher's default values to *path* in
    Python pickle format using dill."""
    import dill
    with open(path, 'wb') as fh:
        dill.dump(dsp.default_values, fh)
Write Dispatcher default values in Python pickle format .
42
12
4,691
def load_default_values(dsp, path):
    """Load Dispatcher default values from the dill/pickle file at *path*
    and re-initialize *dsp* with them, keeping its existing dmap."""
    import dill
    with open(path, 'rb') as fh:
        # noinspection PyArgumentList
        dsp.__init__(dmap=dsp.dmap, default_values=dill.load(fh))
Load Dispatcher default values in Python pickle format .
64
12
4,692
def save_map(dsp, path):
    """Serialize the Dispatcher's graph (dmap) to *path* in Python
    pickle format using dill."""
    import dill
    with open(path, 'wb') as fh:
        dill.dump(dsp.dmap, fh)
Write Dispatcher graph object in Python pickle format .
39
12
4,693
def chunk(keywords, lines):
    """Partition *lines* into chunks delimited by keyword lines.

    A new chunk starts at each line whose first token is in *keywords*;
    subsequent non-keyword lines are appended to the current chunk.
    Returns a dict mapping each keyword to its list of chunks. Blank
    lines are dropped; lines before the first keyword are discarded.
    """
    groups = {kw: [] for kw in keywords}
    current = []  # lines preceding the first keyword land here and are discarded
    for line in lines:
        if not line.strip():
            continue  # skip blank lines entirely
        first = line.split()[0]
        if first in keywords:
            # Start a new chunk and register it under its keyword
            current = [line]
            groups[first].append(current)
        else:
            current.append(line)
    return groups
Divide a file into chunks between key words in the list
105
12
4,694
def valueReadPreprocessor(valueString, replaceParamsFile=None):
    """Apply global pre-processing to values while reading the project.

    When a replace-params file is given and the value contains brackets,
    the value is treated as a replacement variable and mapped to the
    negative of the matching target parameter's id (or REPLACE_NO_VALUE
    when no parameter matches). Otherwise the value passes through.
    """
    # Booleans cannot be processed numerically; warn and pass through.
    if type(valueString) is bool:
        log.warning("Only numerical variable types can be handled by the valueReadPreprocessor function.")
        return valueString

    processed = valueString

    if replaceParamsFile is not None and valueString is not None:
        # Bracketed values denote replacement variables
        if '[' in valueString or ']' in valueString:
            # Fall back to the sentinel when no parameter matches
            processed = '{0}'.format(REPLACE_NO_VALUE)
            for targetParam in replaceParamsFile.targetParameters:
                if targetParam.targetVariable == valueString:
                    processed = '{0}'.format(-1 * targetParam.id)
                    break
    return processed
Apply global pre - processing to values during reading throughout the project .
169
13
4,695
def valueWritePreprocessor(valueString, replaceParamsFile=None):
    """Look up the variable name in the replace-params file for the
    negative id given and return it.

    When a replace-params file is given, the REPLACE_NO_VALUE sentinel
    becomes '[NO_VARIABLE]', and a negative integer value is mapped back
    to the target variable whose id matches its magnitude. Any other
    value passes through unchanged.
    """
    # Booleans cannot be processed numerically; warn and pass through.
    # (Fixed: the message previously named valueReadPreprocessor.)
    if type(valueString) is bool:
        log.warning("Only numerical variable types can be handled by the valueWritePreprocessor function.")
        return valueString

    # Default: pass the value through unchanged
    variableString = valueString

    if replaceParamsFile is not None:
        if variableString == REPLACE_NO_VALUE:
            variableString = '[NO_VARIABLE]'
        else:
            try:
                number = int(valueString)
                if number < 0:
                    parameterID = -number
                    # Find the matching target parameter
                    for targetParam in replaceParamsFile.targetParameters:
                        if targetParam.id == parameterID:
                            variableString = targetParam.targetVariable
                            break
            except (ValueError, TypeError):
                # Not an integer literal (or None): leave the value as-is.
                # Narrowed from a bare except so unrelated errors propagate.
                pass
    return variableString
Look up variable name in replace param file for the negative id given and return it .
160
17
4,696
def run(self, dataset_path):
    """Run every configured FeatureExtractor and write the combined
    results to *dataset_path* as CSV."""
    table = self._generate_features(self._feature_extractors)
    table.to_csv(dataset_path)
Run all FeatureExtractors and output results to CSV .
39
12
4,697
def _generate_features ( self , feature_extractors ) : results = [ pd . DataFrame ( ) ] n_ext = len ( feature_extractors ) for i , extractor in enumerate ( feature_extractors ) : log . info ( "generating: '%s' (%d/%d)" , extractor . name , i + 1 , n_ext ) cached_extractor = self . _cache [ extractor . name ] if extractor . same ( cached_extractor ) : log . info ( 'pulling from cache' ) extractor = cached_extractor else : log . info ( 'running...' ) extractor . extract ( ) results . append ( extractor . result ) if self . cache_path : self . _cache [ extractor . name ] = extractor if self . cache_path : with open ( self . cache_path , 'wb' ) as f : pickle . dump ( self . _cache , f ) return pd . concat ( results , axis = 1 )
Run all FeatureExtractors and record results in a key - value format .
226
16
4,698
def read(self, directory, filename, session, spatial=False,
         spatialReferenceID=4236, replaceParamFile=None, **kwargs):
    """Generic read-file-into-database method.

    Adds self to *session*, delegates parsing to the subclass _read
    hook, and commits. When the file does not exist the session is
    rolled back and a warning is logged instead.
    """
    path = os.path.join(directory, filename)
    parts = filename.split('.')
    name = parts[0]
    # Extension is everything after the last dot, or empty when absent
    extension = parts[-1] if len(parts) >= 2 else ''

    if not os.path.isfile(path):
        # Nothing to read: undo any pending session work and warn
        session.rollback()
        log.warning('Could not find file named {0}. File not read.'.format(filename))
        return

    session.add(self)
    self._read(directory, filename, session, path, name, extension,
               spatial, spatialReferenceID, replaceParamFile, **kwargs)
    self._commit(session, self.COMMIT_ERROR_MESSAGE)
Generic read file into database method .
221
7
4,699
def write(self, session, directory, name, replaceParamFile=None, **kwargs):
    """Write this object from the database back to a file.

    session -- database session used by the subclass _write hook.
    directory -- directory in which to write the file.
    name -- base file name; an explicit extension in *name* overrides
            self.fileExtension.
    replaceParamFile -- optional replacement-parameter file forwarded to
                        _write.
    """
    # Split the name into base and (optional) extension
    name_split = name.split('.')
    name = name_split[0]

    extension = ''
    if len(name_split) >= 2:
        extension = name_split[-1]

    # Subclasses may define a name preprocessor; skip it when absent.
    # Narrowed from a bare `except:` (with a no-op string statement) so
    # that real errors inside a defined preprocessor still propagate.
    try:
        name = self._namePreprocessor(name)
    except AttributeError:
        pass

    if extension == '':
        filename = '{0}.{1}'.format(name, self.fileExtension)
    else:
        filename = '{0}.{1}'.format(name, extension)

    filePath = os.path.join(directory, filename)

    with io_open(filePath, 'w') as openFile:
        # Delegate the actual formatting to the subclass hook
        self._write(session=session,
                    openFile=openFile,
                    replaceParamFile=replaceParamFile,
                    **kwargs)
Write from database back to file .
209
7