idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
20,500
def unlink ( self , definition , doc1 , doc2 ) : links = self . database [ definition ] . fetchByExample ( { "_from" : doc1 . _id , "_to" : doc2 . _id } , batchSize = 100 ) for l in links : self . deleteEdge ( l )
deletes all links between doc1 and doc2
67
10
20,501
def deleteEdge ( self , edge , waitForSync = False ) : url = "%s/edge/%s" % ( self . URL , edge . _id ) r = self . connection . session . delete ( url , params = { 'waitForSync' : waitForSync } ) if r . status_code == 200 or r . status_code == 202 : return True raise DeletionError ( "Unable to delete edge, %s" % edge . _id , r . json ( ) )
removes an edge from the graph
109
7
20,502
def delete ( self , _key ) : try : doc = self . cacheStore [ _key ] doc . prev . nextDoc = doc . nextDoc doc . nextDoc . prev = doc . prev del ( self . cacheStore [ _key ] ) except KeyError : raise KeyError ( "Document with _key %s is not available in cache" % _key )
removes a document from the cache
78
7
20,503
def getChain ( self ) : l = [ ] h = self . head while h : l . append ( h . _key ) h = h . nextDoc return l
returns a list of keys representing the chain of documents
36
11
20,504
def validate ( self , value ) : for v in self . validators : v . validate ( value ) return True
checks the validity of value given the list of validators
24
12
20,505
def getCollectionClass ( cls , name ) : try : return cls . collectionClasses [ name ] except KeyError : raise KeyError ( "There is no Collection Class of type: '%s'; currently supported values: [%s]" % ( name , ', ' . join ( getCollectionClasses ( ) . keys ( ) ) ) )
Return the class object of a collection given its name
74
10
20,506
def isDocumentCollection ( cls , name ) : try : col = cls . getCollectionClass ( name ) return issubclass ( col , Collection ) except KeyError : return False
return true or false whether name is the name of a document collection .
39
15
20,507
def isEdgeCollection ( cls , name ) : try : col = cls . getCollectionClass ( name ) return issubclass ( col , Edges ) except KeyError : return False
return true or false whether name is the name of an edge collection .
40
15
20,508
def getIndexes ( self ) : url = "%s/index" % self . database . URL r = self . connection . session . get ( url , params = { "collection" : self . name } ) data = r . json ( ) for ind in data [ "indexes" ] : self . indexes [ ind [ "type" ] ] [ ind [ "id" ] ] = Index ( collection = self , infos = ind ) return self . indexes
Fills self . indexes with all the indexes associated with the collection and returns it
98
16
20,509
def delete ( self ) : r = self . connection . session . delete ( self . URL ) data = r . json ( ) if not r . status_code == 200 or data [ "error" ] : raise DeletionError ( data [ "errorMessage" ] , data )
deletes the collection from the database
60
7
20,510
def createDocument ( self , initDict = None ) : if initDict is not None : return self . createDocument_ ( initDict ) else : if self . _validation [ "on_load" ] : self . _validation [ "on_load" ] = False return self . createDocument_ ( self . defaultDocument ) self . _validation [ "on_load" ] = True else : return self . createDocument_ ( self . defaultDocument )
create and returns a document populated with the defaults or with the values in initDict
102
17
20,511
def createDocument_ ( self , initDict = None ) : if initDict is None : initV = { } else : initV = initDict return self . documentClass ( self , initV )
create and returns a completely empty document or one populated with initDict
45
14
20,512
def ensureHashIndex ( self , fields , unique = False , sparse = True , deduplicate = False ) : data = { "type" : "hash" , "fields" : fields , "unique" : unique , "sparse" : sparse , "deduplicate" : deduplicate } ind = Index ( self , creationData = data ) self . indexes [ "hash" ] [ ind . infos [ "id" ] ] = ind return ind
Creates a hash index if it does not already exist and returns it
101
14
20,513
def ensureGeoIndex ( self , fields ) : data = { "type" : "geo" , "fields" : fields , } ind = Index ( self , creationData = data ) self . indexes [ "geo" ] [ ind . infos [ "id" ] ] = ind return ind
Creates a geo index if it does not already exist and returns it
65
14
20,514
def ensureFulltextIndex ( self , fields , minLength = None ) : data = { "type" : "fulltext" , "fields" : fields , } if minLength is not None : data [ "minLength" ] = minLength ind = Index ( self , creationData = data ) self . indexes [ "fulltext" ] [ ind . infos [ "id" ] ] = ind return ind
Creates a fulltext index if it does not already exist and returns it
87
15
20,515
def validatePrivate ( self , field , value ) : if field not in self . arangoPrivates : raise ValueError ( "%s is not a private field of collection %s" % ( field , self ) ) if field in self . _fields : self . _fields [ field ] . validate ( value ) return True
validate a private field value
67
6
20,516
def simpleQuery ( self , queryType , rawResults = False , * * queryArgs ) : return SimpleQuery ( self , queryType , rawResults , * * queryArgs )
General interface for simple queries . queryType can be something like all by - example etc ... everything is in the arango doc . If rawResults the query will return dictionaries instead of Document objects .
37
42
20,517
def action ( self , method , action , * * params ) : fct = getattr ( self . connection . session , method . lower ( ) ) r = fct ( self . URL + "/" + action , params = params ) return r . json ( )
a generic fct for interacting with everything that doesn't have an assigned fct
56
15
20,518
def bulkSave ( self , docs , onDuplicate = "error" , * * params ) : payload = [ ] for d in docs : if type ( d ) is dict : payload . append ( json . dumps ( d , default = str ) ) else : try : payload . append ( d . toJson ( ) ) except Exception as e : payload . append ( json . dumps ( d . getStore ( ) , default = str ) ) payload = '\n' . join ( payload ) params [ "type" ] = "documents" params [ "onDuplicate" ] = onDuplicate params [ "collection" ] = self . name URL = "%s/import" % self . database . URL r = self . connection . session . post ( URL , params = params , data = payload ) data = r . json ( ) if ( r . status_code == 201 ) and "error" not in data : return True else : if data [ "errors" ] > 0 : raise UpdateError ( "%d documents could not be created" % data [ "errors" ] , data ) return data [ "updated" ] + data [ "created" ]
Parameter docs must be either an iterable of documents or dictionaries . This function will return the number of documents created and updated and will raise an UpdateError exception if there's at least one error . params are any parameters from arango's documentation
249
51
20,519
def getEdges ( self , vertex , inEdges = True , outEdges = True , rawResults = False ) : if isinstance ( vertex , Document ) : vId = vertex . _id elif ( type ( vertex ) is str ) or ( type ( vertex ) is bytes ) : vId = vertex else : raise ValueError ( "Vertex is neither a Document nor a String" ) params = { "vertex" : vId } if inEdges and outEdges : pass elif inEdges : params [ "direction" ] = "in" elif outEdges : params [ "direction" ] = "out" else : raise ValueError ( "inEdges, outEdges or both must have a boolean value" ) r = self . connection . session . get ( self . edgesURL , params = params ) data = r . json ( ) if r . status_code == 200 : if not rawResults : ret = [ ] for e in data [ "edges" ] : ret . append ( Edge ( self , e ) ) return ret else : return data [ "edges" ] else : raise CreationError ( "Unable to return edges for vertex: %s" % vId , data )
returns in out or both edges linked to a given document . vertex can be either a Document object or a string for an _id . If rawResults the arango results will be returned as fetched if false will return a list of Edge objects
263
51
20,520
def reloadCollections ( self ) : r = self . connection . session . get ( self . collectionsURL ) data = r . json ( ) if r . status_code == 200 : self . collections = { } for colData in data [ "result" ] : colName = colData [ 'name' ] if colData [ 'isSystem' ] : colObj = COL . SystemCollection ( self , colData ) else : try : colClass = COL . getCollectionClass ( colName ) colObj = colClass ( self , colData ) except KeyError : if colData [ "type" ] == CONST . COLLECTION_EDGE_TYPE : colObj = COL . Edges ( self , colData ) elif colData [ "type" ] == CONST . COLLECTION_DOCUMENT_TYPE : colObj = COL . Collection ( self , colData ) else : print ( ( "Warning!! Collection of unknown type: %d, trying to load it as Collection nonetheless." % colData [ "type" ] ) ) colObj = COL . Collection ( self , colData ) self . collections [ colName ] = colObj else : raise UpdateError ( data [ "errorMessage" ] , data )
reloads the collection list .
261
6
20,521
def reloadGraphs ( self ) : r = self . connection . session . get ( self . graphsURL ) data = r . json ( ) if r . status_code == 200 : self . graphs = { } for graphData in data [ "graphs" ] : try : self . graphs [ graphData [ "_key" ] ] = GR . getGraphClass ( graphData [ "_key" ] ) ( self , graphData ) except KeyError : self . graphs [ graphData [ "_key" ] ] = Graph ( self , graphData ) else : raise UpdateError ( data [ "errorMessage" ] , data )
reloads the graph list
132
5
20,522
def createGraph ( self , name , createCollections = True , isSmart = False , numberOfShards = None , smartGraphAttribute = None ) : def _checkCollectionList ( lst ) : for colName in lst : if not COL . isCollection ( colName ) : raise ValueError ( "'%s' is not a defined Collection" % colName ) graphClass = GR . getGraphClass ( name ) ed = [ ] for e in graphClass . _edgeDefinitions : if not COL . isEdgeCollection ( e . edgesCollection ) : raise ValueError ( "'%s' is not a defined Edge Collection" % e . edgesCollection ) _checkCollectionList ( e . fromCollections ) _checkCollectionList ( e . toCollections ) ed . append ( e . toJson ( ) ) _checkCollectionList ( graphClass . _orphanedCollections ) options = { } if numberOfShards : options [ 'numberOfShards' ] = numberOfShards if smartGraphAttribute : options [ 'smartGraphAttribute' ] = smartGraphAttribute payload = { "name" : name , "edgeDefinitions" : ed , "orphanCollections" : graphClass . _orphanedCollections } if isSmart : payload [ 'isSmart' ] = isSmart if options : payload [ 'options' ] = options payload = json . dumps ( payload ) r = self . connection . session . post ( self . graphsURL , data = payload ) data = r . json ( ) if r . status_code == 201 or r . status_code == 202 : self . graphs [ name ] = graphClass ( self , data [ "graph" ] ) else : raise CreationError ( data [ "errorMessage" ] , data ) return self . graphs [ name ]
Creates a graph and returns it . name must be the name of a class inheriting from Graph . Checks will be performed to make sure that every collection mentioned in the edges definition exist . Raises a ValueError in case of a non - existing collection .
380
53
20,523
def validateAQLQuery ( self , query , bindVars = None , options = None ) : if bindVars is None : bindVars = { } if options is None : options = { } payload = { 'query' : query , 'bindVars' : bindVars , 'options' : options } r = self . connection . session . post ( self . cursorsURL , data = json . dumps ( payload , default = str ) ) data = r . json ( ) if r . status_code == 201 and not data [ "error" ] : return data else : raise AQLQueryError ( data [ "errorMessage" ] , query , data )
returns the server answer if the query is valid . Raises an AQLQueryError if not
143
20
20,524
def transaction ( self , collections , action , waitForSync = False , lockTimeout = None , params = None ) : payload = { "collections" : collections , "action" : action , "waitForSync" : waitForSync } if lockTimeout is not None : payload [ "lockTimeout" ] = lockTimeout if params is not None : payload [ "params" ] = params self . connection . reportStart ( action ) r = self . connection . session . post ( self . transactionURL , data = json . dumps ( payload , default = str ) ) self . connection . reportItem ( ) data = r . json ( ) if ( r . status_code == 200 or r . status_code == 201 or r . status_code == 202 ) and not data . get ( "error" ) : return data else : raise TransactionError ( data [ "errorMessage" ] , action , data )
Execute a server - side transaction
191
7
20,525
def getPatches ( self ) : if not self . mustValidate : return self . getStore ( ) res = { } res . update ( self . patchStore ) for k , v in self . subStores . items ( ) : res [ k ] = v . getPatches ( ) return res
get patches as a dictionary
65
5
20,526
def getStore ( self ) : res = { } res . update ( self . store ) for k , v in self . subStores . items ( ) : res [ k ] = v . getStore ( ) return res
get the inner store as dictionary
47
6
20,527
def validateField ( self , field ) : if field not in self . validators and not self . collection . _validation [ 'allow_foreign_fields' ] : raise SchemaViolation ( self . collection . __class__ , field ) if field in self . store : if isinstance ( self . store [ field ] , DocumentStore ) : return self [ field ] . validate ( ) if field in self . patchStore : return self . validators [ field ] . validate ( self . patchStore [ field ] ) else : try : return self . validators [ field ] . validate ( self . store [ field ] ) except ValidationError as e : raise ValidationError ( "'%s' -> %s" % ( field , str ( e ) ) ) except AttributeError : if isinstance ( self . validators [ field ] , dict ) and not isinstance ( self . store [ field ] , dict ) : raise ValueError ( "Validator expected a sub document for field '%s', got '%s' instead" % ( field , self . store [ field ] ) ) else : raise return True
Validate a field
237
5
20,528
def validate ( self ) : if not self . mustValidate : return True res = { } for field in self . validators . keys ( ) : try : if isinstance ( self . validators [ field ] , dict ) and field not in self . store : self . store [ field ] = DocumentStore ( self . collection , validators = self . validators [ field ] , initDct = { } , subStore = True , validateInit = self . validateInit ) self . validateField ( field ) except InvalidDocument as e : res . update ( e . errors ) except ( ValidationError , SchemaViolation ) as e : res [ field ] = str ( e ) if len ( res ) > 0 : raise InvalidDocument ( res ) return True
Validate the whole document
161
5
20,529
def set ( self , dct ) : # if not self.mustValidate : # self.store = dct # self.patchStore = dct # return for field , value in dct . items ( ) : if field not in self . collection . arangoPrivates : if isinstance ( value , dict ) : if field in self . validators and isinstance ( self . validators [ field ] , dict ) : vals = self . validators [ field ] else : vals = { } self [ field ] = DocumentStore ( self . collection , validators = vals , initDct = value , patch = self . patching , subStore = True , validateInit = self . validateInit ) self . subStores [ field ] = self . store [ field ] else : self [ field ] = value
Set the store using a dictionary
175
6
20,530
def reset ( self , collection , jsonFieldInit = None ) : if not jsonFieldInit : jsonFieldInit = { } self . collection = collection self . connection = self . collection . connection self . documentsURL = self . collection . documentsURL self . URL = None self . setPrivates ( jsonFieldInit ) self . _store = DocumentStore ( self . collection , validators = self . collection . _fields , initDct = jsonFieldInit ) if self . collection . _validation [ 'on_load' ] : self . validate ( ) self . modified = True
replaces the current values in the document by those in jsonFieldInit
122
14
20,531
def validate ( self ) : self . _store . validate ( ) for pField in self . collection . arangoPrivates : self . collection . validatePrivate ( pField , getattr ( self , pField ) )
validate the document
46
4
20,532
def setPrivates ( self , fieldDict ) : for priv in self . privates : if priv in fieldDict : setattr ( self , priv , fieldDict [ priv ] ) else : setattr ( self , priv , None ) if self . _id is not None : self . URL = "%s/%s" % ( self . documentsURL , self . _id )
will set self . _id self . _rev and self . _key field .
83
17
20,533
def patch ( self , keepNull = True , * * docArgs ) : if self . URL is None : raise ValueError ( "Cannot patch a document that was not previously saved" ) payload = self . _store . getPatches ( ) if self . collection . _validation [ 'on_save' ] : self . validate ( ) if len ( payload ) > 0 : params = dict ( docArgs ) params . update ( { 'collection' : self . collection . name , 'keepNull' : keepNull } ) payload = json . dumps ( payload , default = str ) r = self . connection . session . patch ( self . URL , params = params , data = payload ) data = r . json ( ) if ( r . status_code == 201 or r . status_code == 202 ) and "error" not in data : self . _rev = data [ '_rev' ] else : raise UpdateError ( data [ 'errorMessage' ] , data ) self . modified = False self . _store . resetPatch ( )
Saves the document by only updating the modified fields . The default behaviour concerning the keepNull parameter is the opposite of ArangoDB's default Null values won't be ignored Use docArgs for things such as waitForSync = True
221
47
20,534
def delete ( self ) : if self . URL is None : raise DeletionError ( "Can't delete a document that was not saved" ) r = self . connection . session . delete ( self . URL ) data = r . json ( ) if ( r . status_code != 200 and r . status_code != 202 ) or 'error' in data : raise DeletionError ( data [ 'errorMessage' ] , data ) self . reset ( self . collection ) self . modified = True
deletes the document from the database
106
7
20,535
def getEdges ( self , edges , inEdges = True , outEdges = True , rawResults = False ) : try : return edges . getEdges ( self , inEdges , outEdges , rawResults ) except AttributeError : raise AttributeError ( "%s does not seem to be a valid Edges object" % edges )
returns in out or both edges linked to self belonging to the collection edges . If rawResults the arango results will be returned as fetched if false will return a list of Edge objects
75
38
20,536
def getStore ( self ) : store = self . _store . getStore ( ) for priv in self . privates : v = getattr ( self , priv ) if v : store [ priv ] = v return store
return the store in a dict format
46
7
20,537
def links ( self , fromVertice , toVertice , * * edgeArgs ) : if isinstance ( fromVertice , Document ) or isinstance ( getattr ( fromVertice , 'document' , None ) , Document ) : if not fromVertice . _id : fromVertice . save ( ) self . _from = fromVertice . _id elif ( type ( fromVertice ) is bytes ) or ( type ( fromVertice ) is str ) : self . _from = fromVertice elif not self . _from : raise CreationError ( 'fromVertice %s is invalid!' % str ( fromVertice ) ) if isinstance ( toVertice , Document ) or isinstance ( getattr ( toVertice , 'document' , None ) , Document ) : if not toVertice . _id : toVertice . save ( ) self . _to = toVertice . _id elif ( type ( toVertice ) is bytes ) or ( type ( toVertice ) is str ) : self . _to = toVertice elif not self . _to : raise CreationError ( 'toVertice %s is invalid!' % str ( toVertice ) ) self . save ( * * edgeArgs )
An alias to save that updates the _from and _to attributes . fromVertice and toVertice can be either strings or documents . If they are unsaved documents they will be automatically saved .
266
40
20,538
def _set ( self , jsonData ) : self [ "username" ] = jsonData [ "user" ] self [ "active" ] = jsonData [ "active" ] self [ "extra" ] = jsonData [ "extra" ] try : self [ "changePassword" ] = jsonData [ "changePassword" ] except Exception as e : pass # self["changePassword"] = "" try : self [ "password" ] = jsonData [ "passwd" ] except KeyError : self [ "password" ] = "" self . URL = "%s/user/%s" % ( self . connection . URL , self [ "username" ] )
Initialize all fields at once . If no password is specified it will be set as an empty string
140
20
20,539
def delete ( self ) : if not self . URL : raise CreationError ( "Please save user first" , None , None ) r = self . connection . session . delete ( self . URL ) if r . status_code < 200 or r . status_code > 202 : raise DeletionError ( "Unable to delete user, url: %s, status: %s" % ( r . url , r . status_code ) , r . content ) self . URL = None
Permanently remove the user
103
6
20,540
def fetchAllUsers ( self , rawResults = False ) : r = self . connection . session . get ( self . URL ) if r . status_code == 200 : data = r . json ( ) if rawResults : return data [ "result" ] else : res = [ ] for resu in data [ "result" ] : u = User ( self , resu ) res . append ( u ) return res else : raise ConnectionError ( "Unable to get user list" , r . url , r . status_code )
Returns all available users . if rawResults the result will be a list of python dicts instead of User objects
113
22
20,541
def fetchUser ( self , username , rawResults = False ) : url = "%s/%s" % ( self . URL , username ) r = self . connection . session . get ( url ) if r . status_code == 200 : data = r . json ( ) if rawResults : return data [ "result" ] else : u = User ( self , data ) return u else : raise KeyError ( "Unable to get user: %s" % username )
Returns a single user . if rawResults the result will be a list of python dicts instead of User objects
100
22
20,542
def resetSession ( self , username = None , password = None , verify = True ) : self . disconnectSession ( ) self . session = AikidoSession ( username , password , verify )
resets the session
40
4
20,543
def reload ( self ) : r = self . session . get ( self . databasesURL ) data = r . json ( ) if r . status_code == 200 and not data [ "error" ] : self . databases = { } for dbName in data [ "result" ] : if dbName not in self . databases : self . databases [ dbName ] = DBHandle ( self , dbName ) else : raise ConnectionError ( data [ "errorMessage" ] , self . databasesURL , r . status_code , r . content )
Reloads the database list . Because loading a database triggers the loading of all collections and graphs within only handles are loaded when this function is called . The full databases are loaded on demand when accessed
114
39
20,544
def createDatabase ( self , name , * * dbArgs ) : dbArgs [ 'name' ] = name payload = json . dumps ( dbArgs , default = str ) url = self . URL + "/database" r = self . session . post ( url , data = payload ) data = r . json ( ) if r . status_code == 201 and not data [ "error" ] : db = Database ( self , name ) self . databases [ name ] = db return self . databases [ name ] else : raise CreationError ( data [ "errorMessage" ] , r . content )
use dbArgs for arguments other than name . for a full list of arguments please have a look at arangoDB s doc
124
25
20,545
def output ( self , args ) : print ( "SensuPlugin: {}" . format ( ' ' . join ( str ( a ) for a in args ) ) )
Print the output message .
37
5
20,546
def __make_dynamic ( self , method ) : def dynamic ( * args ) : self . plugin_info [ 'status' ] = method if not args : args = None self . output ( args ) sys . exit ( getattr ( self . exit_code , method ) ) method_lc = method . lower ( ) dynamic . __doc__ = "%s method" % method_lc dynamic . __name__ = method_lc setattr ( self , dynamic . __name__ , dynamic )
Create a method for each of the exit codes .
105
10
20,547
def __exitfunction ( self ) : if self . _hook . exit_code is None and self . _hook . exception is None : print ( "Check did not exit! You should call an exit code method." ) sys . stdout . flush ( ) os . _exit ( 1 ) elif self . _hook . exception : print ( "Check failed to run: %s, %s" % ( sys . last_type , traceback . format_tb ( sys . last_traceback ) ) ) sys . stdout . flush ( ) os . _exit ( 2 )
Method called by exit hook ensures that both an exit code and output is supplied also catches errors .
124
19
20,548
def run ( self ) : # Parse the stdin into a global event object stdin = self . read_stdin ( ) self . event = self . read_event ( stdin ) # Prepare global settings self . settings = get_settings ( ) self . api_settings = self . get_api_settings ( ) # Prepare command line arguments and self . parser = argparse . ArgumentParser ( ) # set up the 2.x to 1.x event mapping argument self . parser . add_argument ( "--map-v2-event-into-v1" , action = "store_true" , default = False , dest = "v2event" ) if hasattr ( self , 'setup' ) : self . setup ( ) ( self . options , self . remain ) = self . parser . parse_known_args ( ) # map the event if required if ( self . options . v2event or os . environ . get ( "SENSU_MAP_V2_EVENT_INTO_V1" ) ) : self . event = map_v2_event_into_v1 ( self . event ) # Filter (deprecated) and handle self . filter ( ) self . handle ( )
Set up the event object global settings and command line arguments .
263
12
20,549
def filter ( self ) : if self . deprecated_filtering_enabled ( ) : print ( 'warning: event filtering in sensu-plugin is deprecated,' + 'see http://bit.ly/sensu-plugin' ) self . filter_disabled ( ) self . filter_silenced ( ) self . filter_dependencies ( ) if self . deprecated_occurrence_filtering ( ) : print ( 'warning: occurrence filtering in sensu-plugin is' + 'deprecated, see http://bit.ly/sensu-plugin' ) self . filter_repeated ( )
Filters exit the process if the event should not be handled . Filtering events is deprecated and will be removed in a future release .
126
28
20,550
def bail ( self , msg ) : client_name = self . event [ 'client' ] . get ( 'name' , 'error:no-client-name' ) check_name = self . event [ 'check' ] . get ( 'name' , 'error:no-check-name' ) print ( '{}: {}/{}' . format ( msg , client_name , check_name ) ) sys . exit ( 0 )
Gracefully terminate with message
96
6
20,551
def api_request ( self , method , path ) : if not hasattr ( self , 'api_settings' ) : ValueError ( 'api.json settings not found' ) if method . lower ( ) == 'get' : _request = requests . get elif method . lower ( ) == 'post' : _request = requests . post domain = self . api_settings [ 'host' ] uri = '{}:{}/{}' . format ( domain , self . api_settings [ 'port' ] , path ) if self . api_settings . get ( 'user' ) and self . api_settings . get ( 'password' ) : auth = ( self . api_settings [ 'user' ] , self . api_settings [ 'password' ] ) else : auth = ( ) req = _request ( uri , auth = auth ) return req
Query Sensu api for information .
187
7
20,552
def event_exists ( self , client , check ) : return self . api_request ( 'get' , 'events/{}/{}' . format ( client , check ) ) . status_code == 200
Query Sensu API for event .
47
7
20,553
def filter_silenced ( self ) : stashes = [ ( 'client' , '/silence/{}' . format ( self . event [ 'client' ] [ 'name' ] ) ) , ( 'check' , '/silence/{}/{}' . format ( self . event [ 'client' ] [ 'name' ] , self . event [ 'check' ] [ 'name' ] ) ) , ( 'check' , '/silence/all/{}' . format ( self . event [ 'check' ] [ 'name' ] ) ) ] for scope , path in stashes : if self . stash_exists ( path ) : self . bail ( scope + ' alerts silenced' )
Determine whether a check is silenced and shouldn't handle .
155
13
20,554
def filter_dependencies ( self ) : dependencies = self . event [ 'check' ] . get ( 'dependencies' , None ) if dependencies is None or not isinstance ( dependencies , list ) : return for dependency in self . event [ 'check' ] [ 'dependencies' ] : if not str ( dependency ) : continue dependency_split = tuple ( dependency . split ( '/' ) ) # If there's a dependency on a check from another client, then use # that client name, otherwise assume same client. if len ( dependency_split ) == 2 : client , check = dependency_split else : client = self . event [ 'client' ] [ 'name' ] check = dependency_split [ 0 ] if self . event_exists ( client , check ) : self . bail ( 'check dependency event exists' )
Determine whether a check has dependencies .
175
9
20,555
def filter_repeated ( self ) : defaults = { 'occurrences' : 1 , 'interval' : 30 , 'refresh' : 1800 } # Override defaults with anything defined in the settings if isinstance ( self . settings [ 'sensu_plugin' ] , dict ) : defaults . update ( self . settings [ 'sensu_plugin' ] ) occurrences = int ( self . event [ 'check' ] . get ( 'occurrences' , defaults [ 'occurrences' ] ) ) interval = int ( self . event [ 'check' ] . get ( 'interval' , defaults [ 'interval' ] ) ) refresh = int ( self . event [ 'check' ] . get ( 'refresh' , defaults [ 'refresh' ] ) ) if self . event [ 'occurrences' ] < occurrences : self . bail ( 'not enough occurrences' ) if ( self . event [ 'occurrences' ] > occurrences and self . event [ 'action' ] == 'create' ) : return number = int ( refresh / interval ) if ( number == 0 or ( self . event [ 'occurrences' ] - occurrences ) % number == 0 ) : return self . bail ( 'only handling every ' + str ( number ) + ' occurrences' )
Determine whether a check is repeating .
278
9
20,556
def config_files ( ) : sensu_loaded_tempfile = os . environ . get ( 'SENSU_LOADED_TEMPFILE' ) sensu_config_files = os . environ . get ( 'SENSU_CONFIG_FILES' ) sensu_v1_config = '/etc/sensu/config.json' sensu_v1_confd = '/etc/sensu/conf.d' if sensu_loaded_tempfile and os . path . isfile ( sensu_loaded_tempfile ) : with open ( sensu_loaded_tempfile , 'r' ) as tempfile : contents = tempfile . read ( ) return contents . split ( ':' ) elif sensu_config_files : return sensu_config_files . split ( ':' ) else : files = [ ] filenames = [ ] if os . path . isfile ( sensu_v1_config ) : files = [ sensu_v1_config ] if os . path . isdir ( sensu_v1_confd ) : filenames = [ f for f in os . listdir ( sensu_v1_confd ) if os . path . splitext ( f ) [ 1 ] == '.json' ] for filename in filenames : files . append ( '{}/{}' . format ( sensu_v1_confd , filename ) ) return files
Get list of currently used config files .
317
8
20,557
def get_settings ( ) : settings = { } for config_file in config_files ( ) : config_contents = load_config ( config_file ) if config_contents is not None : settings = deep_merge ( settings , config_contents ) return settings
Get all currently loaded settings .
60
6
20,558
def load_config ( filename ) : try : with open ( filename , 'r' ) as config_file : return json . loads ( config_file . read ( ) ) except IOError : pass
Read contents of config file .
42
6
20,559
def deep_merge ( dict_one , dict_two ) : merged = dict_one . copy ( ) for key , value in dict_two . items ( ) : # value is equivalent to dict_two[key] if ( key in dict_one and isinstance ( dict_one [ key ] , dict ) and isinstance ( value , dict ) ) : merged [ key ] = deep_merge ( dict_one [ key ] , value ) elif ( key in dict_one and isinstance ( dict_one [ key ] , list ) and isinstance ( value , list ) ) : merged [ key ] = list ( set ( dict_one [ key ] + value ) ) else : merged [ key ] = value return merged
Deep merge two dicts .
157
6
20,560
def map_v2_event_into_v1(event):
    """Translate a Sensu 2.x event dict into the 1.x layout, in place.

    Idempotent: an already-mapped event is returned untouched.
    """
    if "v2_event_mapped_into_v1" in event:
        return event

    # Map only when a 2.x entity is present and no 1.x client exists.
    if not bool(event.get('client')) and "entity" in event:
        event['client'] = event['entity']

    # Fill in client attributes missing from the 2.x shape.
    client = event['client']
    if "name" not in client:
        client['name'] = event['entity']['id']
    if "subscribers" not in client:
        client['subscribers'] = event['entity']['subscriptions']

    # Renamed check attributes expected by 1.4-style handlers.
    check = event['check']
    if "subscribers" not in check:
        check['subscribers'] = check['subscriptions']
    if "source" not in check:
        check['source'] = check['proxy_entity_id']

    # Mimic the 1.4 "action" from the 2.0 check state.
    action_state_mapping = {
        'flapping': 'flapping',
        'passing': 'resolve',
        'failing': 'create',
    }
    state = check['state'] if "state" in check else "unknown::2.0_event"
    if "action" not in event and state.lower() in action_state_mapping:
        event['action'] = action_state_mapping[state.lower()]
    else:
        event['action'] = state

    # 1.4 history is a flat list of status strings; keep the 2.0 original.
    if "history" in check:
        check['history_v2'] = deepcopy(check['history'])
        legacy_history = []
        for entry in check['history']:
            if isinstance(entry['status'], int):
                legacy_history.append(str(entry['status']))
            else:
                # Non-integer statuses are mapped to "unknown" (3).
                legacy_history.append("3")
        check['history'] = legacy_history

    # Flag so a second call is a no-op.
    event['v2_event_mapped_into_v1'] = True
    return event
Helper method to convert a Sensu 2.x event into a Sensu 1.x event.
596
18
20,561
def check_name(self, name=None):
    """Return the plugin's check name.

    When *name* is given it is stored first; otherwise the stored name is
    returned, falling back to the class name when none is set.
    """
    if name:
        self.plugin_info['check_name'] = name
    stored = self.plugin_info['check_name']
    if stored is not None:
        return stored
    return self.__class__.__name__
Checks the plugin name and sets it accordingly . Uses name if specified class name if not set .
69
20
20,562
def sampled_logs(self, logs_limit=-1):
    """Return at most *logs_limit* logs, evenly sampled.

    ``-1`` (the default) returns every log; sampling always includes the
    first and last entries.
    """
    total = len(self.logs)
    if logs_limit == -1 or total <= logs_limit:
        return self.logs
    if logs_limit == 0:
        return []
    if logs_limit == 1:
        return [self.logs[-1]]
    # Evenly spaced indices covering both endpoints of `self.logs`.
    span = total - 1
    return [self.logs[i * span // (logs_limit - 1)] for i in range(logs_limit)]
Return up to logs_limit logs .
149
8
20,563
def serialize_with_sampled_logs(self, logs_limit=-1):
    """Serialize this result, including at most *logs_limit* logs."""
    serialized = {
        'id': self.id,
        'pathName': self.path_name,
        'name': self.name,
        'isUnregistered': self.is_unregistered,
        'logs': [log.serialize for log in self.sampled_logs(logs_limit)],
        # `args` may legitimately be absent -> serialize as an empty list
        'args': self.args.serialize if self.args is not None else [],
        'commands': [cmd.serialize for cmd in self.commands],
        'snapshots': [cmd.serialize for cmd in self.snapshots],
        'logModifiedAt': self.log_modified_at.isoformat(),
    }
    return serialized
serialize a result with up to logs_limit logs .
167
12
20,564
def reporter(prefix=None, out=None, subdir='', timeout=5, **kwargs):
    """Yield a reporter that summarizes media assets for visualization.

    The collected report is saved when the context exits.
    """
    rep = _Reporter(prefix, out, subdir, **kwargs)
    yield rep
    rep.save(timeout)
Summary media assets to visualize .
54
6
20,565
def audio(audio, sample_rate, name=None, out=None, subdir='', timeout=5,
          **kwargs):
    """Summarize an audio sample so it can be played back in a browser."""
    from chainerui.report.audio_report import check_available
    if not check_available():
        # audio backend not installed -> silently do nothing
        return
    from chainerui.report.audio_report import report as _audio

    out_root = _chainerui_asset_observer.get_outpath(out)
    out_path = os.path.join(out_root, subdir)
    if not os.path.isdir(out_path):
        os.makedirs(out_path)

    col_name = 'audio' if name is None else name
    filename, created_at = _audio(audio, sample_rate, out_path, col_name)

    # remaining kwargs become part of the asset summary
    summary = kwargs
    summary['timestamp'] = created_at.isoformat()
    summary['audios'] = {col_name: os.path.join(subdir, filename)}
    _chainerui_asset_observer.add(summary)
    _chainerui_asset_observer.save(out_root, timeout)
summary audio files to listen on a browser .
256
9
20,566
def audio(self, audio, sample_rate, name=None, subdir=''):
    """Summarize audio for playback in the web browser."""
    from chainerui.report.audio_report import check_available
    if not check_available():
        # audio backend not installed -> silently do nothing
        return
    from chainerui.report.audio_report import report as _audio

    col_name = self.get_col_name(name, 'audio')
    out_dir, rel_out_dir = self.get_subdir(subdir)
    filename, _ = _audio(audio, sample_rate, out_dir, col_name)
    self.audios[col_name] = os.path.join(rel_out_dir, filename)
    self.count += 1
Summary audio to listen on web browser .
147
8
20,567
def create(cls, path_name=None, name=None, crawlable=True):
    """Create a project record, commit it, and return its collected results."""
    new_project = cls(path_name, name, crawlable)
    db.session.add(new_project)
    db.session.commit()
    # force a fresh crawl of the newly created project's results
    return collect_results(new_project, force=True)
initialize an instance and save it to db .
62
10
20,568
def collect_assets(result, force=False):
    """Collect assets for *result* from its on-disk meta file.

    New entries in the meta file become Asset/Bindata records appended to
    ``result.assets``; nothing happens when the meta file is absent or has
    not been modified since the last collection.

    NOTE(review): the `force` parameter is unused in this body — confirm
    whether any caller relies on it.
    """
    path_name = result.path_name
    info_path = os.path.join(path_name, summary.CHAINERUI_ASSETS_METAFILE_NAME)
    if not os.path.isfile(info_path):
        return
    start_idx = len(result.assets)
    file_modified_at = datetime.datetime.fromtimestamp(
        os.path.getmtime(info_path))
    if start_idx > 0:
        # meta file unchanged since the last collected asset -> nothing new
        if result.assets[-1].file_modified_at == file_modified_at:
            return
    with open(info_path, 'r') as f:
        info_list = json.load(f, object_pairs_hook=OrderedDict)
    if len(info_list) < start_idx:
        # the meta file shrank: re-collect everything from scratch
        start_idx = 0
        result.assets = []
    for base_info in info_list[start_idx:]:
        # media entries are stored separately from the scalar summary
        asset_path = base_info.pop('images', {})
        asset_path.update(base_info.pop('audios', {}))
        asset = Asset.create(
            result_id=result.id, summary=base_info,
            file_modified_at=file_modified_at)
        for key, path in asset_path.items():
            with open(os.path.join(path_name, path), 'rb') as f:
                data = f.read()
            content = Bindata(
                asset_id=asset.id, name=path, tag=key, content=data)
            asset.content_list.append(content)
        result.assets.append(asset)
    db.session.commit()
collect assets from meta file
376
5
20,569
def save_args(conditions, out_path):
    """Persist experiment conditions as ``args`` (JSON) under *out_path*.

    *conditions* may be an ``argparse.Namespace`` or a plain dict.
    """
    if isinstance(conditions, argparse.Namespace):
        payload = vars(conditions)
    else:
        payload = conditions

    try:
        os.makedirs(out_path)
    except OSError:
        # directory already exists
        pass

    # Write into a temp dir first, then move into place.
    with tempdir(prefix='args', dir=out_path) as tmp:
        tmp_file = os.path.join(tmp, 'args.json')
        with open(tmp_file, 'w') as handle:
            json.dump(payload, handle, indent=4)
        shutil.move(tmp_file, os.path.join(out_path, 'args'))
A util function to save experiment condition for job table .
146
11
20,570
def _path_insensitive ( path ) : path = str ( path ) if path == '' or os . path . exists ( path ) : return path base = os . path . basename ( path ) # may be a directory or a file dirname = os . path . dirname ( path ) suffix = '' if not base : # dir ends with a slash? if len ( dirname ) < len ( path ) : suffix = path [ : len ( path ) - len ( dirname ) ] base = os . path . basename ( dirname ) dirname = os . path . dirname ( dirname ) if not os . path . exists ( dirname ) : dirname = _path_insensitive ( dirname ) if not dirname : return # at this point, the directory exists but not the file try : # we are expecting dirname to be a directory, but it could be a file files = os . listdir ( dirname ) except OSError : return baselow = base . lower ( ) try : basefinal = next ( fl for fl in files if fl . lower ( ) == baselow ) except StopIteration : return if basefinal : return os . path . join ( dirname , basefinal ) + suffix else : return
Recursive part of path_insensitive to do the work .
268
13
20,571
def form_option(str_opt):
    """Build the URL anchor suffix for a documented option name."""
    anchor = str_opt.lower()
    # sphinx anchors use '-' for '_' and '('; ')' is dropped entirely
    for old, new in (('_', '-'), ('(', '-'), (')', '')):
        anchor = anchor.replace(old, new)
    return '#cmdoption-arg-' + anchor
generate option name based suffix for URL
71
8
20,572
def gen_url_option(str_opt,
                   set_site=set_site,
                   set_runcontrol=set_runcontrol,
                   set_initcond=set_initcond,
                   source='docs'):
    """Construct the documentation URL for option *str_opt*.

    *source* selects the base location ('docs' for readthedocs,
    'github' for the raw docs repository).
    """
    base_urls = {
        'docs': URL('https://suews-docs.readthedocs.io/en/latest/input_files/'),
        'github': URL('https://github.com/Urban-Meteorology-Reading/SUEWS-Docs/raw/master/docs/source/input_files/'),
    }
    url_base = base_urls[source]
    url_page = choose_page(
        str_opt, set_site, set_runcontrol, set_initcond, source=source)
    # page + '#cmdoption-arg-...' anchor for the specific option
    return url_base / (url_page + form_option(str_opt))
construct a URL for option based on source
224
8
20,573
def gen_df_forcing(
        path_csv_in='SSss_YYYY_data_tt.csv',
        url_base=url_repo_input,
) -> pd.DataFrame:
    """Generate description info of SuPy forcing data as a dataframe.

    The CSV table in the SUEWS docs repository is regarded as the
    official source. Returns None when the table cannot be retrieved.
    """
    # Bind the URL before the try-block so the error message below can
    # always reference it (the original bound it inside `try`, risking a
    # NameError in the handler).
    urlpath_table = url_base / path_csv_in
    try:
        df_var_info = pd.read_csv(urlpath_table)
    except Exception:
        # narrowed from a bare `except`: table missing or network down
        print(f'{urlpath_table} not existing!')
    else:
        # clean info dataframe: drop bookkeeping columns
        df_var_forcing = df_var_info.drop(['No.', 'Use'], axis=1)
        # index by `Column Name`, stripping rst backticks
        df_var_forcing = df_var_forcing.set_index('Column Name')
        df_var_forcing.index = df_var_forcing.index.map(
            lambda x: x.replace('`', '')).rename('variable')
        # `isec` is not listed in the docs table; add it manually
        df_var_forcing.loc['isec'] = 'Second [S]'
        return df_var_forcing
Generate description info of supy forcing data into a dataframe
246
13
20,574
def gen_df_output(
        list_csv_in=[
            'SSss_YYYY_SUEWS_TT.csv',
            'SSss_DailyState.csv',
            'SSss_YYYY_snow_TT.csv',
        ],
        url_base=url_repo_output) -> pd.DataFrame:
    """Generate description info of SuPy output results as a dataframe.

    Reads the per-group output variable tables from the SUEWS docs repo,
    de-duplicates their descriptions, and joins them with the column
    grouping of `df_output_sample`. Returns None when download fails.
    (Return annotation corrected from `Path`: the function returns a
    DataFrame.)
    """
    # list of URLs of the source tables
    list_url_table = [
        url_base / table for table in list_csv_in
    ]
    try:
        df_var_info = pd.concat(
            [pd.read_csv(f) for f in list_url_table],
            sort=False)
    # NOTE(review): bare `except` also swallows KeyboardInterrupt;
    # consider narrowing to `Exception`.
    except:
        # report which tables could not be fetched
        for url in list_url_table:
            if not url.get().ok:
                print(f'{url} not existing!')
    else:
        # clean meta info: one description per variable name
        df_var_info_x = df_var_info.set_index('Name').loc[
            :, ['Description']].drop_duplicates()
        # index by lower-cased name for case-insensitive matching
        df_var_output = df_var_info_x.copy().assign(
            lower=df_var_info_x.index.str.lower()
        ).reset_index().set_index('lower')
        # grouping of output columns taken from the sample output
        df_var_group = df_output_sample.columns.to_frame()
        df_var_group.index = df_var_group.index.droplevel(0).rename('Name')
        # wrap into a dataframe: join group info with descriptions
        df_var_output = df_var_group.merge(
            df_var_output.set_index('Name'),
            left_on='Name',
            right_on='Name').rename(columns={
                'var': 'variable',
                'group': 'Group',
            }).set_index('variable').drop_duplicates()
        return df_var_output
Generate description info of supy output results into dataframe
412
12
20,575
def gen_opt_str(ser_rec: pd.Series) -> str:
    """Render a pandas Series as an rst ``.. option::`` entry.

    The Series name becomes the option; each (sorted) index entry becomes
    a field with its value indented beneath it.
    """
    indent = r' '
    parts = [f'.. option:: {ser_rec.name}', '']
    for spec in ser_rec.sort_index().index:
        parts.append(f'{indent}:{spec}:')
        parts.append(f'{indent}{indent}{ser_rec[spec]}')
    return '\n'.join(parts) + '\n'
generate rst option string
118
6
20,576
def init_supy(path_init: str) -> pd.DataFrame:
    """Initialise SuPy by loading initial model states.

    *path_init* may point at a SUEWS ``RunControl.nml`` or a SuPy
    ``df_state.csv``. Exits the interpreter for missing/unsupported files.
    """
    path_init_x = Path(path_init).expanduser().resolve()
    # `Path.resolve()` does not raise for missing files on modern Python,
    # so check explicitly. (The original's `except FileNotFoundError`
    # handler referenced `path_init_x` before assignment -> NameError.)
    if not path_init_x.exists():
        print('{path} does not exist!'.format(path=path_init_x))
        sys.exit()
    if path_init_x.suffix == '.nml':
        # SUEWS `RunControl.nml`:
        df_state_init = load_InitialCond_grid_df(path_init_x)
    elif path_init_x.suffix == '.csv':
        # SuPy `df_state.csv`:
        df_state_init = load_df_state(path_init_x)
    else:
        print('{path} is NOT a valid file to initialise SuPy!'.format(
            path=path_init_x))
        sys.exit()
    return df_state_init
Initialise supy by loading initial model states .
204
10
20,577
def load_SampleData() -> Tuple[pandas.DataFrame, pandas.DataFrame]:
    """Load the bundled sample run for quickly starting a demo.

    Returns ``(df_state_init, df_forcing)``.
    """
    path_sample = Path(path_supy_module) / 'sample_run'
    path_runcontrol = path_sample / 'RunControl.nml'
    df_state_init = init_supy(path_runcontrol)
    # forcing for the first (and only) sample grid
    df_forcing = load_forcing_grid(path_runcontrol, df_state_init.index[0])
    return df_state_init, df_forcing
Load sample data for quickly starting a demo run .
135
10
20,578
def save_supy(df_output: pandas.DataFrame,
              df_state_final: pandas.DataFrame,
              freq_s: int = 3600,
              site: str = '',
              path_dir_save: str = Path('.'),
              path_runcontrol: str = None,
              ) -> list:
    """Save SuPy run results (output tables plus final state) to files.

    When *path_runcontrol* is given, frequency/site/output-dir are read
    from it and override the explicit arguments. Returns the list of
    paths written.
    """
    if path_runcontrol is not None:
        freq_s, path_dir_save, site = get_save_info(path_runcontrol)
    # output tables, possibly resampled to `freq_s` ...
    list_path_save = save_df_output(df_output, freq_s, site, path_dir_save)
    # ... plus the final model state
    list_path_save.append(save_df_state(df_state_final, site, path_dir_save))
    return list_path_save
Save SuPy run results to files
206
7
20,579
def load_df_state(path_csv: Path) -> pd.DataFrame:
    """Load a SuPy ``df_state`` table from *path_csv*.

    The CSV is expected to carry two header rows (variable, index) and a
    two-level row index.
    """
    return pd.read_csv(
        path_csv,
        header=[0, 1],
        index_col=[0, 1],
        parse_dates=True,
        infer_datetime_format=True,
    )
load df_state from path_csv
73
8
20,580
def extract_var_suews(dict_var_full: dict, var_supy: str) -> list:
    """List SUEWS variables related to the SuPy variable *var_supy*."""
    related = sp.supy_load.flatten_list(dict_var_full[var_supy])
    related = np.unique(related)
    # drop placeholder tokens and plain hour indices (0-23)
    ignored = ['base', 'const', '0.0'] + [str(h) for h in range(24)]
    related = [v for v in related if v not in ignored]
    # `...Code` entries reference other tables, not variables
    return [v for v in related if 'Code' not in v]
extract related SUEWS variables for a supy variable var_supy
120
16
20,581
def gen_df_site(
        list_csv_in=list_table,
        url_base=url_repo_input_site) -> pd.DataFrame:
    """Generate description info of SuPy site-characteristics variables.

    Combines variable descriptions from the SUEWS docs tables with the
    SUEWS-related variable mapping and the dimensionality derived from
    the sample initial state. Returns None when download fails.
    """
    # list of URLs of the source tables
    list_url_table = [
        url_base / table for table in list_csv_in
    ]
    try:
        df_var_info = pd.concat([pd.read_csv(f) for f in list_url_table])
        # df_var_info = pd.concat(
        #     [pd.read_csv(f) for f in list_url_table],
        #     sort=False)
    # NOTE(review): bare `except` also swallows KeyboardInterrupt;
    # consider narrowing to `Exception`.
    except:
        # report which tables could not be fetched
        for url in list_url_table:
            if not url.get().ok:
                print(f'{url} not existing!')
    else:
        # clean meta info: drop bookkeeping columns, strip rst backticks
        df_var_info_x = df_var_info.drop(
            ['No.', 'Use'], axis=1).set_index('Column Name')
        df_var_info_x.index = df_var_info_x.index.map(
            lambda x: x.replace('`', ''))
        # retrieve SUEWS-related variables for each supy variable
        dict_var_full = sp.supy_load.exp_dict_full(
            sp.supy_load.dict_var2SiteSelect)
        dict_var_ref_suews = {
            k: extract_var_suews(dict_var_full, k)
            for k in dict_var_full
        }
        df_var_ref_suews = pd.DataFrame(
            {
                k: ', '.join(dict_var_ref_suews[k])
                for k in dict_var_ref_suews
            },
            index=[0]).T.rename({0: 'SUEWS-related variables'}, axis=1)
        # retrieve supy variable descriptions from the docs tables
        dict_var_desc = {
            k: '\n'.join(df_var_info_x.loc[v].values.flatten())
            for k, v in dict_var_ref_suews.items()
        }
        df_var_desc = pd.DataFrame(
            dict_var_desc, index=[0]).T.rename(columns={0: 'Description'})
        # retrieve variable dimensionality
        df_var_dim = gen_df_dim(df_init_sample)
        df_var_site_raw = pd.concat(
            [df_var_dim, df_var_desc, df_var_ref_suews],
            axis=1, sort=False)
        # keep only input variables with complete info
        df_var_site = df_var_site_raw.filter(
            items=set_input, axis=0).dropna()
        return df_var_site
Generate description info of supy site characteristics into a dataframe
605
13
20,582
def gen_rst_url_split_opts(opts_str):
    """Generate an rst cross-reference list for comma-separated options.

    Returns the literal string ``'None'`` unchanged; otherwise each option
    is wrapped as ``:option:`opt <suews:opt>``` so SUEWS options resolve
    via intersphinx.
    """
    # BUGFIX: the original used `opts_str is not 'None'`, an *identity*
    # comparison with a string literal, which is unreliable (depends on
    # interning). Use equality instead.
    if opts_str != 'None':
        opts = [opt.strip() for opt in opts_str.split(',')]
        refs = [f':option:`{opt} <suews:{opt}>`' for opt in opts]
        return ', '.join(refs)
    return 'None'
generate option list for RST docs
199
8
20,583
def gen_df_state(list_table: list,
                 set_initcond: set,
                 set_runcontrol: set,
                 set_input_runcontrol: set) -> pd.DataFrame:
    """Generate the dataframe describing all state variables used by SuPy."""
    # assemble the three partial tables ...
    df_site = gen_df_site(list_table)
    df_runcontrol = gen_df_runcontrol(
        set_initcond, set_runcontrol, set_input_runcontrol)
    df_initcond = gen_df_initcond(set_initcond, set_runcontrol)
    # ... merge and post-process them
    df_state = proc_df_state(df_site, df_runcontrol, df_initcond)
    df_state = df_state.sort_index()
    # drop duplicate rows while keeping the variable name (the index)
    df_state = df_state.reset_index().drop_duplicates().set_index('variable')
    return df_state
generate dataframe of all state variables used by supy
287
12
20,584
def gen_df_save(df_grid_group: pd.DataFrame) -> pd.DataFrame:
    """Prepend SUEWS-style date/time columns for saving.

    Derives ``Year``/``DOY``/``Hour``/``Min``/``Dectime`` from the
    DatetimeIndex of *df_grid_group* and returns the combined frame.
    """
    idx_dt = df_grid_group.index
    df_datetime = pd.DataFrame(
        {
            'Year': idx_dt.year,
            'DOY': idx_dt.dayofyear,
            'Hour': idx_dt.hour,
            'Min': idx_dt.minute,
        },
        index=idx_dt,
    )
    # `DatetimeIndex.to_perioddelta('d')` was deprecated and removed in
    # pandas 2.0; `idx - idx.normalize()` yields the same time-of-day
    # offsets.
    sec_of_day = (idx_dt - idx_dt.normalize()).total_seconds()
    df_datetime['Dectime'] = (
        idx_dt.dayofyear - 1 + sec_of_day / (24 * 60 * 60))
    return pd.concat([df_datetime, df_grid_group], axis=1)
generate a dataframe for saving
281
7
20,585
def save_df_output(df_output: pd.DataFrame,
                   freq_s: int = 3600,
                   site: str = '',
                   path_dir_save: Path = Path('.'),
                   ) -> list:
    """Write each grid/group of *df_output* to text files.

    Output is saved at the native model frequency and again resampled to
    *freq_s* seconds; returns the list of paths written.
    """
    paths_written = []

    # 1) native-frequency output (the 'DailyState' group stays daily)
    for grid in df_output.index.get_level_values('grid').unique():
        for group in df_output.columns.get_level_values('group').unique():
            df_grid_group = df_output.loc[grid, group].dropna(
                how='all', axis=0)
            paths_written.append(
                save_df_grid_group(df_grid_group, grid, group,
                                   site=site, dir_save=path_dir_save))

    # 2) output resampled to `freq_s`; 'DailyState' needs no resampling
    #    and is dropped by `resample_output`, so drop its column group too
    df_rsmp = resample_output(df_output, pd.Timedelta(freq_s, 's'))
    df_rsmp = df_rsmp.drop(columns='DailyState')
    for grid in df_rsmp.index.get_level_values('grid').unique():
        for group in df_rsmp.columns.get_level_values('group').unique():
            paths_written.append(
                save_df_grid_group(df_rsmp.loc[grid, group], grid, group,
                                   site=site, dir_save=path_dir_save))
    return paths_written
save supy output dataframe to txt files
492
10
20,586
def save_df_state(df_state: pd.DataFrame,
                  site: str = '',
                  path_dir_save: Path = Path('.'),
                  ) -> Path:
    """Write *df_state* as ``df_state_<site>.csv`` and return its path."""
    # trim the dangling underscore when `site` is empty
    filename = 'df_state_{site}.csv'.format(site=site).replace('_.csv', '.csv')
    path_state_save = path_dir_save / filename
    print('writing out: {path_out}'.format(path_out=path_state_save))
    df_state.to_csv(path_state_save)
    return path_state_save
save df_state to a csv file
156
9
20,587
def gen_FS_DF(df_output):
    """Generate the DataFrame of fitness scores per (year, month)."""
    vars_met = ['T2', 'U10', 'Kdown', 'RH2']
    aggs = [min, max, np.mean]
    # daily extrema/means per (year, month, day) ...
    df_day = pd.pivot_table(
        df_output, values=vars_met,
        index=['Year', 'Month', 'Day'], aggfunc=aggs)
    # ... and the all-year climatology per (month, day)
    df_day_all_year = pd.pivot_table(
        df_output, values=vars_met,
        index=['Month', 'Day'], aggfunc=aggs)
    # unique (year, month) pairs present in the data
    array_yr_mon = (df_day.index.droplevel('Day')
                    .to_frame().drop_duplicates().values)
    # mean absolute score deviation from climatology per (year, month)
    df_fs = pd.DataFrame({
        (yr, mon): (df_day.loc[(yr, mon)].apply(gen_score_ser)
                    - df_day_all_year.loc[mon].apply(gen_score_ser))
        .abs().mean()
        for yr, mon in array_yr_mon})
    return df_fs
generate DataFrame of scores .
272
7
20,588
def gen_WS_DF(df_WS_data):
    """Generate the DataFrame of weighted sums of fitness scores."""
    df_fs = gen_FS_DF(df_WS_data)
    # (statistic, variable) rows of `df_fs` paired with their weights
    specs = [
        ('mean', 'T2', 'T_MEAN'),
        ('max', 'T2', 'T_MAX'),
        ('min', 'T2', 'T_MIN'),
        ('mean', 'U10', 'WIND_MEAN'),
        ('max', 'U10', 'WIND_MAX'),
        ('min', 'U10', 'WIND_MIN'),
        ('mean', 'RH2', 'RH_MEAN'),
        ('max', 'RH2', 'RH_MAX'),
        ('min', 'RH2', 'RH_MIN'),
        ('mean', 'Kdown', 'SOLAR_RADIATION_GLOBAL'),
    ]
    weighted = [df_fs.loc[(stat, var)] * getattr(const, attr)
                for stat, var, attr in specs]
    return pd.concat(weighted, axis=1).sum(axis=1).unstack().dropna()
generate DataFrame of weighted sums .
310
8
20,589
def _geoid_radius(latitude: float) -> float:
    """Radius of the WGS84 reference ellipsoid at *latitude* (degrees)."""
    lat_rad = deg2rad(latitude)
    denom = (cos(lat_rad) ** 2 / Rmax_WGS84 ** 2
             + sin(lat_rad) ** 2 / Rmin_WGS84 ** 2)
    return sqrt(1 / denom)
Calculates the GEOID radius at a given latitude
62
12
20,590
def geometric2geopotential(z: float, latitude: float) -> float:
    """Convert geometric height *z* to geopotential at *latitude* (deg)."""
    twolat = deg2rad(2 * latitude)
    # latitude-dependent gravity formula
    g = 9.80616 * (1 - 0.002637 * cos(twolat) + 0.0000059 * cos(twolat) ** 2)
    radius = _geoid_radius(latitude)
    return z * g * radius / (radius + z)
Converts geometric height to geopoential height
89
9
20,591
def geopotential2geometric(h: float, latitude: float) -> float:
    """Convert geopotential *h* back to geometric height at *latitude* (deg)."""
    twolat = deg2rad(2 * latitude)
    # latitude-dependent gravity formula (inverse of geometric2geopotential)
    g = 9.80616 * (1 - 0.002637 * cos(twolat) + 0.0000059 * cos(twolat) ** 2)
    radius = _geoid_radius(latitude)
    return h * radius / (g * radius - h)
Converts geopoential height to geometric height
90
9
20,592
def get_ser_val_alt(lat: float, lon: float,
                    da_alt_x: xr.DataArray,
                    da_alt: xr.DataArray,
                    da_val: xr.DataArray) -> pd.Series:
    """Interpolate *da_val* to the altitude *da_alt_x* at (lat, lon)."""
    # profiles at the nearest grid point
    alt_t_1d = da_alt.sel(latitude=lat, longitude=lon, method='nearest')
    val_t_1d = da_val.sel(latitude=lat, longitude=lon, method='nearest')
    alt_x = da_alt_x.sel(latitude=lat, longitude=lon, method='nearest')[0]
    # interpolate each time step onto the target altitude
    val_alt = np.array([
        interp1d(alt_1d, val_1d)(alt_x)
        for alt_1d, val_1d in zip(alt_t_1d, val_t_1d)
    ])
    return pd.Series(val_alt, index=da_val.time.values, name=da_val.name)
interpolate atmospheric variable to a specified altitude
245
9
20,593
def get_df_val_alt(lat: float, lon: float,
                   da_alt_meas: xr.DataArray,
                   ds_val: xr.Dataset):
    """Interpolate the variables of *ds_val* to a measurement altitude."""
    da_alt = geopotential2geometric(ds_val.z, ds_val.latitude)
    # altitude profile at the chosen grid point
    da_alt_x = da_alt.sel(latitude=lat, longitude=lon, method='nearest')
    alt_meas_x = da_alt_meas.sel(
        latitude=lat, longitude=lon, method='nearest')[0]
    # pressure at the measurement altitude for each time step
    val_pres = np.array([
        interp1d(alt, da_alt_x.level)(alt_meas_x) for alt in da_alt_x
    ])
    df_val_alt = pd.concat(
        [get_ser_val_alt(lat, lon, da_alt_meas, da_alt, ds_val[var])
         for var in ds_val.data_vars],
        axis=1)
    # add pressure and normalize axis names
    df_val_alt['p'] = val_pres
    df_val_alt.index = df_val_alt.index.set_names('time')
    df_val_alt.columns = df_val_alt.columns.set_names('var')
    return df_val_alt
interpolate atmospheric variables to a specified altitude
319
9
20,594
def sel_list_pres(ds_sfc_x):
    """Select pressure levels bracketing the surface-pressure range.

    Returns the level names (hPa, as strings) spanning just beyond the
    min/max surface pressure found in *ds_sfc_x*.
    """
    p_min, p_max = ds_sfc_x.sp.min().values, ds_sfc_x.sp.max().values
    # full catalogue of available pressure levels (hPa, as strings)
    list_pres_level = [
        '1', '2', '3', '5', '7', '10', '20', '30', '50', '70',
        '100', '125', '150', '175', '200', '225', '250', '300',
        '350', '400', '450', '500', '550', '600', '650', '700',
        '750', '775', '800', '825', '850', '875', '900', '925',
        '950', '975', '1000',
    ]
    ser_pres_level = pd.Series(list_pres_level).map(int) * 100  # to Pa
    # positions of the levels just beyond the observed range
    pos_lev_max = ser_pres_level[ser_pres_level > p_max].idxmin()
    pos_lev_min = ser_pres_level[ser_pres_level < p_min].idxmax()
    sel = ser_pres_level.loc[pos_lev_min:pos_lev_max] / 100
    return sel.map(int).map(str).to_list()
select proper levels for model level data download
356
8
20,595
def load_world(filename):
    """Load a World from *filename* (HDF5).

    The concrete World class is selected from the version information
    stored in the file by ``ecell4_base.core.load_version_information``.
    Raises RuntimeError for missing or unknown version information.
    """
    import ecell4_base

    vinfo = ecell4_base.core.load_version_information(filename)
    if vinfo.startswith("ecell4-bd"):
        return ecell4_base.bd.World(filename)
    elif vinfo.startswith("ecell4-egfrd"):
        return ecell4_base.egfrd.World(filename)
    elif vinfo.startswith("ecell4-meso"):
        return ecell4_base.meso.World(filename)
    elif vinfo.startswith("ecell4-ode"):
        return ecell4_base.ode.World(filename)
    elif vinfo.startswith("ecell4-gillespie"):
        return ecell4_base.gillespie.World(filename)
    elif vinfo.startswith("ecell4-spatiocyte"):
        return ecell4_base.spatiocyte.World(filename)
    elif vinfo == "":
        raise RuntimeError(
            "No version information was found in [{0}]".format(filename))
    # fixed typo in the error message: "Unkown" -> "Unknown"
    raise RuntimeError("Unknown version information [{0}]".format(vinfo))
Load a world from the given HDF5 filename. The return type is determined by ecell4_base.core.load_version_information.
285
31
20,596
def show(target, *args, **kwargs):
    """Display *target* with the visualization appropriate for its type.

    Number observers are plotted as time series, trajectory observers as
    trajectories, worlds are rendered, models are dumped as text, and a
    string is treated as the filename of a saved world.
    """
    # number observers -> time-series plot
    if isinstance(target, (ecell4_base.core.FixedIntervalNumberObserver,
                           ecell4_base.core.NumberObserver,
                           ecell4_base.core.TimingNumberObserver,
                           )):
        plot_number_observer(target, *args, **kwargs)
    # trajectory observers -> trajectory plot
    elif isinstance(target, (ecell4_base.core.FixedIntervalTrajectoryObserver,
                             ecell4_base.core.FixedIntervalTrackingObserver)):
        plot_trajectory(target, *args, **kwargs)
    # worlds -> world rendering
    elif isinstance(target, (ecell4_base.ode.ODEWorld,
                             ecell4_base.gillespie.GillespieWorld,
                             ecell4_base.spatiocyte.SpatiocyteWorld,
                             ecell4_base.meso.MesoscopicWorld,
                             ecell4_base.bd.BDWorld,
                             ecell4_base.egfrd.EGFRDWorld)):
        plot_world(target, *args, **kwargs)
    # models -> textual dump
    elif isinstance(target, (ecell4_base.core.Model,
                             ecell4_base.core.NetworkModel,
                             ecell4_base.core.NetfreeModel)):
        dump_model(target)
    elif isinstance(target, str):
        # a string is taken as the path of a saved world file
        try:
            w = simulation.load_world(target)
        except RuntimeError as e:
            raise ValueError(
                "The given target [{}] is not supported.".format(repr(target)))
        else:
            show(w, *args, **kwargs)
    else:
        raise ValueError(
            "The given target [{}] is not supported.".format(repr(target)))
An utility function to display the given target object in the proper way .
394
14
20,597
def print_batch_exception(batch_exception):
    """Log the contents of an Azure Batch exception."""
    rule = '-------------------------------------------'
    _log.error(rule)
    _log.error('Exception encountered:')
    err = batch_exception.error
    if err and err.message and err.message.value:
        _log.error(err.message.value)
        # additional key/value details, when present
        if err.values:
            _log.error('')
            for detail in err.values:
                _log.error('{}:\t{}'.format(detail.key, detail.value))
    _log.error(rule)
Prints the contents of the specified Batch exception .
145
11
20,598
def upload_file_to_container(block_blob_client, container_name, file_path):
    """Upload a local file to Azure Blob storage and return a ResourceFile.

    The returned ResourceFile carries a read-only SAS URL valid for two
    hours.
    """
    blob_name = os.path.basename(file_path)
    _log.info('Uploading file {} to container [{}]...'.format(
        file_path, container_name))
    block_blob_client.create_blob_from_path(
        container_name, blob_name, file_path)
    expiry = datetime.datetime.utcnow() + datetime.timedelta(hours=2)
    sas_token = block_blob_client.generate_blob_shared_access_signature(
        container_name,
        blob_name,
        permission=azureblob.BlobPermissions.READ,
        expiry=expiry)
    sas_url = block_blob_client.make_blob_url(
        container_name, blob_name, sas_token=sas_token)
    return batchmodels.ResourceFile(http_url=sas_url, file_path=blob_name)
Uploads a local file to an Azure Blob storage container .
231
13
20,599
def get_container_sas_token(block_blob_client, container_name,
                            blob_permissions):
    """Return a container SAS token granting *blob_permissions*.

    No start time is specified, so the shared access signature becomes
    valid immediately; it expires after two hours.
    """
    expiry = datetime.datetime.utcnow() + datetime.timedelta(hours=2)
    return block_blob_client.generate_container_shared_access_signature(
        container_name,
        permission=blob_permissions,
        expiry=expiry)
Obtains a shared access signature granting the specified permissions to the container .
135
14