idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
17,900
def _object_resolution(self, result_list):
    """Perform in-place object resolution on results returned by cypher_query.

    Driver ``Node`` values whose label set is registered in
    ``_NODE_CLASS_REGISTRY`` are inflated to their neomodel class; nested
    lists are resolved recursively.

    :raises ModelDefinitionMismatch: for a node whose label set is unknown.
    """
    for row_index, row in enumerate(result_list):
        for col_index, attribute in enumerate(row):
            try:
                resolved = attribute
                if type(attribute) is Node:
                    resolved = self._NODE_CLASS_REGISTRY[
                        frozenset(attribute.labels)].inflate(attribute)
                if type(attribute) is list:
                    resolved = self._object_resolution([attribute])
                result_list[row_index][col_index] = resolved
            except KeyError:
                # no class registered for this exact label combination
                raise ModelDefinitionMismatch(attribute, self._NODE_CLASS_REGISTRY)
    return result_list
Performs in place automatic object resolution on a set of results returned by cypher_query .
17,901
def install_traversals(cls, node_set):
    """Install a Traversal object on ``node_set`` for every relationship
    defined on the StructuredNode class ``cls``.

    :raises ValueError: if an attribute with a relationship's name already
        exists on the NodeSet.
    """
    relationships = cls.defined_properties(rels=True, aliases=False, properties=False)
    for name in relationships:
        if hasattr(node_set, name):
            raise ValueError("Can't install traversal '{0}' exists on NodeSet".format(name))
        relation = getattr(cls, name)
        relation._lookup_node_class()
        setattr(node_set, name,
                Traversal(source=node_set, name=name, definition=relation.definition))
For a StructuredNode class install Traversal objects for each relationship definition on a NodeSet instance
17,902
def process_filter_args(cls, kwargs):
    """Validate and deflate filter parameters against the class definition.

    :param cls: StructuredNode subclass the filters apply to
    :param kwargs: ``prop`` or ``prop__operator`` keyword filters
    :return: dict mapping db property names to ``(operator, deflated_value)``
        tuples, ready for cypher generation
    :raises ValueError: on unknown properties or malformed operator values
    """
    output = {}

    for key, value in kwargs.items():
        if '__' in key:
            # Split only on the LAST '__' so property names that themselves
            # contain '__' do not break the two-value unpacking below
            # (bare rsplit would split on every occurrence).
            prop, operator = key.rsplit('__', 1)
            operator = OPERATOR_TABLE[operator]
        else:
            prop = key
            operator = '='

        if prop not in cls.defined_properties(rels=False):
            raise ValueError("No such property {0} on {1}".format(prop, cls.__name__))

        property_obj = getattr(cls, prop)
        if isinstance(property_obj, AliasProperty):
            # resolve the alias and deflate via the target property
            prop = property_obj.aliased_to()
            deflated_value = getattr(cls, prop).deflate(value)
        else:
            if operator == _SPECIAL_OPERATOR_IN:
                if not isinstance(value, (tuple, list)):
                    raise ValueError('Value must be a tuple or list for IN operation {0}={1}'.format(key, value))
                deflated_value = [property_obj.deflate(v) for v in value]
            elif operator == _SPECIAL_OPERATOR_ISNULL:
                if not isinstance(value, bool):
                    raise ValueError('Value must be a bool for isnull operation on {0}'.format(key))
                operator = 'IS NULL' if value else 'IS NOT NULL'
                deflated_value = None
            elif operator in _REGEX_OPERATOR_TABLE.values():
                deflated_value = property_obj.deflate(value)
                if not isinstance(deflated_value, basestring):
                    raise ValueError('Must be a string value for {0}'.format(key))
                if operator in _STRING_REGEX_OPERATOR_TABLE.values():
                    # contains/startswith/endswith: escape user text before
                    # embedding it into the regex template
                    deflated_value = re.escape(deflated_value)
                deflated_value = operator.format(deflated_value)
                operator = _SPECIAL_OPERATOR_REGEX
            else:
                deflated_value = property_obj.deflate(value)

        # map to the db property name if the property declares one
        db_property = cls.defined_properties(rels=False)[prop].db_property or prop
        output[db_property] = (operator, deflated_value)

    return output
Loop through the properties in the filter parameters, check that they match the class definition, deflate them, and convert them into a form from which cypher is easy to generate.
17,903
def process_has_args(cls, kwargs):
    """Validate ``has()`` parameters against the relationships defined on
    ``cls`` and split them into (match, dont_match) definition maps."""
    rel_definitions = cls.defined_properties(properties=False, rels=True, aliases=False)

    match = {}
    dont_match = {}

    for key, value in kwargs.items():
        if key not in rel_definitions:
            raise ValueError("No such relation {0} defined on a {1}".format(key, cls.__name__))

        rel_definitions[key]._lookup_node_class()

        if value is True:
            match[key] = rel_definitions[key].definition
        elif value is False:
            dont_match[key] = rel_definitions[key].definition
        elif isinstance(value, NodeSet):
            raise NotImplementedError("Not implemented yet")
        else:
            raise ValueError("Expecting True / False / NodeSet got: " + repr(value))

    return match, dont_match
loop through has parameters check they correspond to class rels defined
17,904
def build_traversal(self, traversal):
    """Add a MATCH clause traversing a relationship from a node to a set of
    nodes; returns the ident used for the traversed-to nodes."""
    # source must be built first so its ident is available for the pattern
    lhs_ident = self.build_source(traversal.source)
    rhs_ident = traversal.name + ':' + traversal.target_class.__label__

    self._ast['return'] = traversal.name
    self._ast['result_class'] = traversal.target_class

    rel_ident = self.create_ident()
    pattern = _rel_helper(lhs=lhs_ident, rhs=rhs_ident, ident=rel_ident,
                          **traversal.definition)
    self._ast['match'].append(pattern)

    if traversal.filters:
        self.build_where_stmt(rel_ident, traversal.filters)

    return traversal.name
traverse a relationship from a node to a set of nodes
17,905
def build_label(self, ident, cls):
    """Match nodes by label; records the return ident and result class in
    the AST and returns the ident."""
    self._ast['match'].append('({0})'.format(ident + ':' + cls.__label__))
    self._ast['return'] = ident
    self._ast['result_class'] = cls
    return ident
match nodes by a label
17,906
def build_where_stmt(self, ident, filters, q_filters=None, source_class=None):
    """Construct a WHERE statement from either a Q-filter tree or a list of
    ``{prop: (operator, value)}`` filter rows (optionally negated via a
    single ``__NOT__`` key)."""
    if q_filters is not None:
        stmts = self._parse_q_filters(ident, q_filters, source_class)
        if stmts:
            self._ast['where'].append(stmts)
        return

    stmts = []
    for row in filters:
        negate = False
        if '__NOT__' in row and len(row) == 1:
            negate = True
            row = row['__NOT__']

        for prop, (op, val) in row.items():
            if op in _UNARY_OPERATORS:
                # IS NULL / IS NOT NULL take no right-hand value
                statement = '{0} {1}.{2} {3}'.format(
                    'NOT' if negate else '', ident, prop, op)
            else:
                place_holder = self._register_place_holder(ident + '_' + prop)
                statement = '{0} {1}.{2} {3} {{{4}}}'.format(
                    'NOT' if negate else '', ident, prop, op, place_holder)
                self._query_params[place_holder] = val
            stmts.append(statement)

    self._ast['where'].append(' AND '.join(stmts))
construct a where statement from some filters
17,907
def first(self, **kwargs):
    """Retrieve the first node from the set matching the supplied parameters.

    :raises source_class.DoesNotExist: if no matching node is found.
    """
    # the original had a duplicated "result = result =" assignment
    result = self._get(limit=1, **kwargs)
    if result:
        return result[0]
    raise self.source_class.DoesNotExist(repr(kwargs))
Retrieve the first node from the set matching supplied parameters
17,908
def filter(self, *args, **kwargs):
    """Apply filters to the existing nodes in the set; returns self for
    chaining.  New filters are AND-combined with any existing ones."""
    if args or kwargs:
        combined = self.q_filters & Q(*args, **kwargs)
        self.q_filters = Q(combined)
    return self
Apply filters to the existing nodes in the set .
17,909
def exclude(self, *args, **kwargs):
    """Exclude nodes from the NodeSet via filters; returns self for
    chaining.  The new filters are negated and AND-combined."""
    if args or kwargs:
        combined = self.q_filters & ~Q(*args, **kwargs)
        self.q_filters = Q(combined)
    return self
Exclude nodes from the NodeSet via filters .
17,910
def order_by(self, *props):
    """Order results by the given properties; prefix a name with ``-`` for
    descending order.  Pass a single ``None`` to remove ordering, ``'?'``
    for random order.  Returns self for chaining."""
    should_remove = len(props) == 1 and props[0] is None
    if not hasattr(self, '_order_by') or should_remove:
        self._order_by = []
    if should_remove:
        return self

    if '?' in props:
        self._order_by.append('?')
        return self

    for prop in props:
        prop = prop.strip()
        desc = prop.startswith('-')
        if desc:
            prop = prop[1:]

        if prop not in self.source_class.defined_properties(rels=False):
            raise ValueError("No such property {0} on {1}".format(
                prop, self.source_class.__name__))

        property_obj = getattr(self.source_class, prop)
        if isinstance(property_obj, AliasProperty):
            # order by the aliased-to property, not the alias itself
            prop = property_obj.aliased_to()

        self._order_by.append(prop + (' DESC' if desc else ''))

    return self
Order by properties . Prepend with minus to do descending . Pass None to remove ordering .
17,911
def match(self, **kwargs):
    """Traverse relationships whose properties match the given parameters;
    returns self for chaining.

    :raises ValueError: when filters are given but the relationship has no
        model to validate them against.
    """
    if not kwargs:
        return self
    if self.definition.get('model') is None:
        raise ValueError("match() with filter only available on relationships with a model")
    processed = process_filter_args(self.definition['model'], kwargs)
    if processed:
        self.filters.append(processed)
    return self
Traverse relationships with properties matching the given parameters .
17,912
def inflate(self, value):
    """Marshal a Neo4J POINT value into a NeomodelPoint.

    :param value: a ``neo4j.types.spatial.Point`` as returned by the driver
    :raises TypeError: if ``value`` is not a driver Point
    :raises ValueError: on an unknown SRID or a CRS mismatch with this property
    """
    if not isinstance(value, neo4j.types.spatial.Point):
        raise TypeError('Invalid datatype to inflate. Expected POINT datatype, received {}'.format(type(value)))
    try:
        value_point_crs = SRID_TO_CRS[value.srid]
    except KeyError:
        raise ValueError('Invalid SRID to inflate. '
                         'Expected one of {}, received {}'.format(SRID_TO_CRS.keys(), value.srid))
    if self._crs != value_point_crs:
        raise ValueError('Invalid CRS. '
                         'Expected POINT defined over {}, received {}'.format(self._crs, value_point_crs))
    # dispatch on SRID: 7203 cartesian 2d, 9157 cartesian 3d,
    # 4326 WGS-84, 4979 WGS-84 3d
    if value.srid == 7203:
        return NeomodelPoint(x=value.x, y=value.y)
    elif value.srid == 9157:
        return NeomodelPoint(x=value.x, y=value.y, z=value.z)
    elif value.srid == 4326:
        return NeomodelPoint(longitude=value.longitude, latitude=value.latitude)
    elif value.srid == 4979:
        return NeomodelPoint(longitude=value.longitude, latitude=value.latitude, height=value.height)
    # NOTE(review): falls through returning None for any SRID present in
    # SRID_TO_CRS but not handled above — presumably unreachable; confirm.
Handles the marshalling from Neo4J POINT to NeomodelPoint
17,913
def deflate(self, value):
    """Marshal a NeomodelPoint into a Neo4J driver POINT.

    :param value: a ``NeomodelPoint`` whose CRS matches this property's
    :raises TypeError: if ``value`` is not a NeomodelPoint
    :raises ValueError: on a CRS mismatch with this property
    """
    if not isinstance(value, NeomodelPoint):
        raise TypeError('Invalid datatype to deflate. Expected NeomodelPoint, received {}'.format(type(value)))
    if not value.crs == self._crs:
        raise ValueError('Invalid CRS. '
                         'Expected NeomodelPoint defined over {}, '
                         'received NeomodelPoint defined over {}'.format(self._crs, value.crs))
    # dispatch on the CRS to pick the driver point type and ordinate tuple
    if value.crs == 'cartesian-3d':
        return neo4j.types.spatial.CartesianPoint((value.x, value.y, value.z))
    elif value.crs == 'cartesian':
        return neo4j.types.spatial.CartesianPoint((value.x, value.y))
    elif value.crs == 'wgs-84':
        return neo4j.types.spatial.WGS84Point((value.longitude, value.latitude))
    elif value.crs == 'wgs-84-3d':
        return neo4j.types.spatial.WGS84Point((value.longitude, value.latitude, value.height))
    # NOTE(review): returns None for any other CRS — presumably unreachable
    # given the equality check above; confirm _crs is always one of the four.
Handles the marshalling from NeomodelPoint to Neo4J POINT
17,914
def add(self, data, conn_type, squash=True):
    """Combine this tree and ``data`` using the connector ``conn_type``.

    The combine is done by squashing the node ``data`` away if possible.

    :param data: a child node (or Q-like subtree) to combine in
    :param conn_type: the connector (e.g. AND / OR) to combine with
    :param squash: when True, try to merge ``data``'s children directly
        into this node instead of nesting a subtree
    :return: the node that now represents ``data`` in the tree — ``self``
        when the subtree was squashed into this node, otherwise ``data``
    """
    if data in self.children:
        # already present: nothing to add
        return data
    if not squash:
        self.children.append(data)
        return data
    if self.connector == conn_type:
        # same connector: we may be able to absorb data's children directly
        if (isinstance(data, QBase) and not data.negated and
                (data.connector == conn_type or len(data) == 1)):
            self.children.extend(data.children)
            return self
        else:
            self.children.append(data)
            return data
    else:
        # different connector: push the current children one level down
        # and pair the resulting subtree with data under conn_type
        obj = self._new_instance(self.children, self.connector, self.negated)
        self.connector = conn_type
        self.children = [obj, data]
        return data
Combine this tree and the data represented by data using the connector conn_type . The combine is done by squashing the node other away if possible .
17,915
def _check_node ( self , obj ) : if not issubclass ( type ( obj ) , self . definition [ 'node_class' ] ) : raise ValueError ( "Expected node of class " + self . definition [ 'node_class' ] . __name__ ) if not hasattr ( obj , 'id' ) : raise ValueError ( "Can't perform operation on unsaved node " + repr ( obj ) )
check for valid node i . e correct class and is saved
17,916
def replace(self, node, properties=None):
    """Disconnect all existing nodes, then connect the supplied node.

    :param node: the node to connect after clearing existing relationships
    :param properties: optional relationship properties for the new connection
    """
    self.disconnect_all()
    self.connect(node, properties)
Disconnect all existing nodes and connect the supplied node
17,917
def relationship(self, node):
    """Retrieve the relationship object for the first relationship between
    ``self`` and ``node``, or ``None`` when they are not connected.

    :param node: a saved node of the expected class
    """
    self._check_node(node)
    my_rel = _rel_helper(lhs='us', rhs='them', ident='r', **self.definition)
    q = "MATCH " + my_rel + " WHERE id(them)={them} and id(us)={self} RETURN r LIMIT 1"
    # first element of the cypher() tuple is the result rows
    rels = self.source.cypher(q, {'them': node.id})[0]
    if not rels:
        return
    # fall back to the generic StructuredRel when no model is defined
    rel_model = self.definition.get('model') or StructuredRel
    return self._set_start_end_cls(rel_model.inflate(rels[0][0]), node)
Retrieve the relationship object for this first relationship between self and node .
17,918
def reconnect(self, old_node, new_node):
    """Disconnect ``old_node`` and connect ``new_node``, copying over any
    properties set on the original relationship.

    No-op when both nodes are the same.

    :raises NotConnected: when no relationship to ``old_node`` exists.
    """
    self._check_node(old_node)
    self._check_node(new_node)
    if old_node.id == new_node.id:
        return
    old_rel = _rel_helper(lhs='us', rhs='old', ident='r', **self.definition)

    # fetch the existing relationship so we can copy its properties
    result, meta = self.source.cypher(
        "MATCH (us), (old) WHERE id(us)={self} and id(old)={old} "
        "MATCH " + old_rel + " RETURN r", {'old': old_node.id})
    if result:
        node_properties = _get_node_properties(result[0][0])
        existing_properties = node_properties.keys()
    else:
        raise NotConnected('reconnect', self.source, old_node)

    new_rel = _rel_helper(lhs='us', rhs='new', ident='r2', **self.definition)
    q = "MATCH (us), (old), (new) " \
        "WHERE id(us)={self} and id(old)={old} and id(new)={new} " \
        "MATCH " + old_rel
    # NOTE(review): no space between "CREATE UNIQUE" and the pattern —
    # presumably _rel_helper emits a leading character that keeps the
    # cypher valid; confirm against its output.
    q += " CREATE UNIQUE" + new_rel

    # copy each property from the old relationship onto the new one
    for p in existing_properties:
        q += " SET r2.{0} = r.{1}".format(p, p)
    q += " WITH r DELETE r"
    self.source.cypher(q, {'old': old_node.id, 'new': new_node.id})
Disconnect old_node and connect new_node copying over any properties on the original relationship .
17,919
def disconnect(self, node):
    """Delete the relationship between the source node and ``node``."""
    pattern = _rel_helper(lhs='a', rhs='b', ident='r', **self.definition)
    query = ("MATCH (a), (b) WHERE id(a)={self} and id(b)={them} "
             "MATCH " + pattern + " DELETE r")
    self.source.cypher(query, {'them': node.id})
Disconnect a node
17,920
def disconnect_all(self):
    """Delete every relationship of this definition from the source node."""
    rhs = 'b:' + self.definition['node_class'].__label__
    pattern = _rel_helper(lhs='a', rhs=rhs, ident='r', **self.definition)
    query = 'MATCH (a) WHERE id(a)={self} MATCH ' + pattern + ' DELETE r'
    self.source.cypher(query)
Disconnect all nodes
17,921
def connect(self, node, properties=None):
    """Connect to a node, enforcing the zero-or-one cardinality.

    :raises AttemptedCardinalityViolation: when a node is already connected.
    """
    if len(self):
        raise AttemptedCardinalityViolation(
            "Node already has {0} can't connect more".format(self))
    return super(ZeroOrOne, self).connect(node, properties)
Connect to a node .
17,922
def single(self):
    """Fetch one of the related nodes.

    :raises CardinalityViolation: when no related node exists.
    """
    nodes = super(OneOrMore, self).all()
    if not nodes:
        raise CardinalityViolation(self, 'none')
    return nodes[0]
Fetch one of the related nodes
17,923
def drop_indexes(quiet=True, stdout=None):
    """Discover and drop all indexes in the database.

    :param quiet: when False, report each dropped index on ``stdout``
        (the original ignored this flag and always wrote)
    :param stdout: output stream; defaults to ``sys.stdout`` (the original
        crashed with AttributeError when left as None)
    """
    if not stdout:
        stdout = sys.stdout

    results, meta = db.cypher_query("CALL db.indexes()")
    # raw string: '\(' is an invalid escape in a plain literal
    pattern = re.compile(r':(.*)\((.*)\)')
    for index in results:
        db.cypher_query('DROP ' + index[0])
        if not quiet:
            match = pattern.search(index[0])
            stdout.write(' - Dropping index on label {0} with property {1}.\n'.format(
                match.group(1), match.group(2)))
    if not quiet:
        stdout.write("\n")
Discover and drop all indexes .
17,924
def remove_all_labels(stdout=None):
    """Drop all constraints and indexes by delegating to
    :func:`drop_constraints` and :func:`drop_indexes`.

    :param stdout: output stream; defaults to ``sys.stdout``
    """
    if not stdout:
        stdout = sys.stdout

    # "Droping" typo in the user-facing messages fixed
    stdout.write("Dropping constraints...\n")
    drop_constraints(quiet=False, stdout=stdout)

    stdout.write('Dropping indexes...\n')
    drop_indexes(quiet=False, stdout=stdout)
Calls functions for dropping constraints and indexes .
17,925
def install_labels(cls, quiet=True, stdout=None):
    """Create indexes and unique constraints for a StructuredNode class.

    :param cls: StructuredNode subclass to install labels for
    :param quiet: when False, report progress on ``stdout``
    :param stdout: output stream; defaults to ``sys.stdout`` (the original
        crashed with AttributeError when ``quiet=False`` and stdout was None)
    """
    if not stdout:
        stdout = sys.stdout

    # abstract classes carry no __label__ and get no indexes/constraints
    if not hasattr(cls, '__label__'):
        if not quiet:
            stdout.write(' ! Skipping class {0}.{1} is abstract\n'.format(
                cls.__module__, cls.__name__))
        return

    for name, property in cls.defined_properties(aliases=False, rels=False).items():
        db_property = property.db_property or name
        if property.index:
            if not quiet:
                stdout.write(' + Creating index {0} on label {1} for class {2}.{3}\n'.format(
                    name, cls.__label__, cls.__module__, cls.__name__))
            db.cypher_query("CREATE INDEX on :{0}({1}); ".format(
                cls.__label__, db_property))
        elif property.unique_index:
            if not quiet:
                stdout.write(' + Creating unique constraint for {0} on label {1} for class {2}.{3}\n'.format(
                    name, cls.__label__, cls.__module__, cls.__name__))
            db.cypher_query("CREATE CONSTRAINT "
                            "on (n:{0}) ASSERT n.{1} IS UNIQUE; ".format(
                                cls.__label__, db_property))
Setup labels with indexes and constraints for a given class
17,926
def _build_merge_query(cls, merge_params, update_existing=False, lazy=False, relationship=None):
    """Get a tuple of a CYPHER query and a params dict for a MERGE query.

    :param merge_params: list of {'create': ..., ('update': ...)} dicts
    :param update_existing: when True, also SET params.update on match
    :param lazy: when True, return only the node id from the query
    :param relationship: optional relationship to merge through; its source
        must be a saved StructuredNode
    :raises ValueError: on a bad relationship source or missing relation_type
    """
    query_params = dict(merge_params=merge_params)
    # MERGE pattern: all inherited labels plus every required property bound
    # from params.create (mapped through db_property where declared)
    n_merge = "n:{0} {{{1}}}".format(
        ":".join(cls.inherited_labels()),
        ", ".join("{0}: params.create.{0}".format(getattr(cls, p).db_property or p)
                  for p in cls.__required_properties__))
    if relationship is None:
        # global MERGE on the node alone
        query = "UNWIND {{merge_params}} as params\n MERGE ({0})\n ".format(n_merge)
    else:
        if not isinstance(relationship.source, StructuredNode):
            raise ValueError("relationship source [{0}] is not a StructuredNode".format(
                repr(relationship.source)))
        relation_type = relationship.definition.get('relation_type')
        if not relation_type:
            raise ValueError('No relation_type is specified on provided relationship')

        # local import to avoid a circular dependency with the match module
        from .match import _rel_helper

        query_params["source_id"] = relationship.source.id
        query = "MATCH (source:{0}) WHERE ID(source) = {{source_id}}\n ".format(
            relationship.source.__label__)
        query += "WITH source\n UNWIND {merge_params} as params \n "
        query += "MERGE "
        query += _rel_helper(lhs='source', rhs=n_merge, ident=None,
                             relation_type=relation_type,
                             direction=relationship.definition['direction'])

    query += "ON CREATE SET n = params.create\n "
    # timestamps etc. are only updated when the node already existed
    if update_existing is True:
        query += "ON MATCH SET n += params.update\n"

    if lazy:
        query += "RETURN id(n)"
    else:
        query += "RETURN n"

    return query, query_params
Get a tuple of a CYPHER query and a params dict for the specified MERGE query .
17,927
def create(cls, *props, **kwargs):
    """Call to CREATE with a parameters map; a new instance per map is
    created and saved.

    :param props: one property-dict per node to create
    :param kwargs: ``lazy=True`` to return only node ids; ``streaming`` is
        deprecated and ignored
    :return: list of inflated nodes (or ids when lazy)
    """
    if 'streaming' in kwargs:
        warnings.warn('streaming is not supported by bolt, please remove the kwarg',
                      category=DeprecationWarning, stacklevel=1)

    lazy = kwargs.get('lazy', False)
    # create mapped query
    query = "CREATE (n:{0} {{create_params}})".format(':'.join(cls.inherited_labels()))

    if lazy:
        query += " RETURN id(n)"
    else:
        query += " RETURN n"

    results = []
    # one query per node: deflate against an unsaved placeholder so
    # defaults are generated
    for item in [cls.deflate(p, obj=_UnsavedNode(), skip_empty=True) for p in props]:
        node, _ = db.cypher_query(query, {'create_params': item})
        results.extend(node[0])

    nodes = [cls.inflate(node) for node in results]

    # run post-create hooks only when full nodes were materialised
    if not lazy and hasattr(cls, 'post_create'):
        for node in nodes:
            node.post_create()

    return nodes
Call to CREATE with parameters map . A new instance will be created and saved .
17,928
def create_or_update(cls, *props, **kwargs):
    """Call to MERGE with a parameters map; creates-or-updates atomically.

    :param props: one property-dict per node to merge
    :param kwargs: ``lazy``, ``relationship``; ``streaming`` is deprecated
    :return: list of inflated nodes
    """
    lazy = kwargs.get('lazy', False)
    relationship = kwargs.get('relationship')

    # build merge query, make sure to update only explicitly specified
    # properties: 'update' holds only the keys the caller actually passed
    create_or_update_params = []
    for specified, deflated in [(p, cls.deflate(p, skip_empty=True)) for p in props]:
        create_or_update_params.append(
            {"create": deflated,
             "update": dict((k, v) for k, v in deflated.items() if k in specified)})
    query, params = cls._build_merge_query(create_or_update_params,
                                           update_existing=True,
                                           relationship=relationship,
                                           lazy=lazy)

    if 'streaming' in kwargs:
        warnings.warn('streaming is not supported by bolt, please remove the kwarg',
                      category=DeprecationWarning, stacklevel=1)

    # fetch and build instance for each result
    results = db.cypher_query(query, params)
    return [cls.inflate(r[0]) for r in results[0]]
Call to MERGE with a parameters map. A new instance will be created and saved if one does not already exist; this is an atomic operation. If an instance already exists, all optional properties specified will be updated.
17,929
def cypher(self, query, params=None):
    """Execute a cypher query with the param ``self`` pre-populated with
    this node's neo4j id.

    NOTE: the supplied params dict is mutated in place.
    """
    self._pre_action_check('cypher')
    params = params or {}
    params.update({'self': self.id})
    return db.cypher_query(query, params)
Execute a cypher query with the param self pre - populated with the nodes neo4j id .
17,930
def delete(self):
    """Delete this node and all of its relationships, then mark the
    instance as deleted (removes ``id``, sets ``deleted``).  Returns True."""
    self._pre_action_check('delete')
    self.cypher("MATCH (self) WHERE id(self)={self} "
                "OPTIONAL MATCH (self)-[r]-()"
                " DELETE r, self")
    delattr(self, 'id')
    self.deleted = True
    return True
Delete a node and its relationships.
17,931
def get_or_create(cls, *props, **kwargs):
    """Call to MERGE with a parameters map; creates and saves a new
    instance atomically when one does not already exist.  All required
    properties must be supplied; non-required ones may be defaulted."""
    lazy = kwargs.get('lazy', False)
    relationship = kwargs.get('relationship')

    merge_params = [{"create": cls.deflate(p, skip_empty=True)} for p in props]
    query, params = cls._build_merge_query(merge_params,
                                           relationship=relationship, lazy=lazy)

    if 'streaming' in kwargs:
        warnings.warn('streaming is not supported by bolt, please remove the kwarg',
                      category=DeprecationWarning, stacklevel=1)

    results = db.cypher_query(query, params)
    return [cls.inflate(r[0]) for r in results[0]]
Call to MERGE with a parameters map. A new instance will be created and saved if one does not already exist; this is an atomic operation. Parameters must contain all required properties; any non-required properties with defaults will be generated.
17,932
def inherited_labels(cls):
    """Return the list of labels contributed by every non-abstract class in
    this class's MRO."""
    labels = []
    for klass in cls.mro():
        if hasattr(klass, '__label__') and not hasattr(klass, '__abstract_node__'):
            labels.append(klass.__label__)
    return labels
Return the list of labels from the node's class hierarchy.
17,933
def refresh(self):
    """Reload this node's properties from neo4j.

    :raises ValueError: when the node was never saved
    :raises DoesNotExist: when the node no longer exists in the database
    """
    self._pre_action_check('refresh')
    if not hasattr(self, 'id'):
        raise ValueError("Can't refresh unsaved node")

    request = self.cypher("MATCH (n) WHERE id(n)={self} RETURN n")[0]
    if not request or not request[0]:
        raise self.__class__.DoesNotExist("Can't refresh non existent node")

    node = self.inflate(request[0][0])
    for key, val in node.__properties__.items():
        setattr(self, key, val)
Reload the node from neo4j
17,934
def save(self):
    """Save the node to neo4j (create or update) or raise an exception;
    returns self.

    :raises ValueError: when called on a deleted node.
    """
    if hasattr(self, 'id'):
        # node already exists in the db: update properties and labels
        deflated = self.deflate(self.__properties__, self)
        query = "MATCH (n) WHERE id(n)={self} \n"
        query += "\n".join(["SET n.{0} = {{{0}}}".format(key) + "\n"
                            for key in deflated.keys()])
        for label in self.inherited_labels():
            query += "SET n:`{0}`\n".format(label)
        self.cypher(query, deflated)
    elif hasattr(self, 'deleted') and self.deleted:
        raise ValueError("{0}.save() attempted on deleted node".format(
            self.__class__.__name__))
    else:
        # never saved: create a fresh node and adopt its id
        self.id = self.create(self.__properties__)[0].id
    return self
Save the node to neo4j or raise an exception
17,935
def default_value(self):
    """Generate the default value for this property, invoking it when the
    configured default is callable.

    :raises Exception: when no default has been defined.
    """
    if not self.has_default:
        raise Exception("No default value specified")
    if hasattr(self.default, '__call__'):
        return self.default()
    return self.default
Generate a default value
17,936
def save(self):
    """Persist this relationship's properties to neo4j; returns self."""
    props = self.deflate(self.__properties__)
    query = "MATCH ()-[r]->() WHERE id(r)={self} "
    query += "".join(" SET r.{0} = {{{0}}}".format(key) for key in props)
    props['self'] = self.id
    db.cypher_query(query, props)
    return self
Save the relationship
17,937
def start_node(self):
    """Fetch this relationship's start node, resolved to its model class."""
    results = db.cypher_query(
        "MATCH (aNode) "
        "WHERE id(aNode)={nodeid} "
        "RETURN aNode".format(nodeid=self._start_node_id),
        resolve_objects=True)
    return results[0][0][0]
Get start node
17,938
def end_node(self):
    """Fetch this relationship's end node, resolved to its model class."""
    results = db.cypher_query(
        "MATCH (aNode) "
        "WHERE id(aNode)={nodeid} "
        "RETURN aNode".format(nodeid=self._end_node_id),
        resolve_objects=True)
    return results[0][0][0]
Get end node
17,939
def get_coi(self, params_dict):
    """Invoke the CO Index endpoint and return the parsed JSON payload.

    :param params_dict: dict with 'lat', 'lon', 'start' and 'interval' keys
    """
    lat = str(params_dict['lat'])
    lon = str(params_dict['lon'])
    start = params_dict['start']
    interval = params_dict['interval']

    # 'current' when no start is given; otherwise trim the start date to
    # the requested interval (defaulting to a year)
    if start is None:
        timeref = 'current'
    elif interval is None:
        timeref = self._trim_to(timeformatutils.to_date(start), 'year')
    else:
        timeref = self._trim_to(timeformatutils.to_date(start), interval)

    fixed_url = '%s/%s,%s/%s.json' % (CO_INDEX_URL, lat, lon, timeref)
    uri = http_client.HttpClient.to_url(fixed_url, self._API_key, None)
    _, json_data = self._client.cacheable_get_json(uri)
    return json_data
Invokes the CO Index endpoint
17,940
def parse_JSON(self, JSON_string):
    """Parse an ``Alert`` instance out of raw JSON data.

    :raises ParseResponseError: when data is None or malformed.
    """
    if JSON_string is None:
        raise parse_response_error.ParseResponseError('JSON data is None')
    d = json.loads(JSON_string)
    try:
        alert_id = d['_id']
        # normalise the ISO timestamp: drop fractional seconds, add UTC offset
        t = d['last_update'].split('.')[0].replace('T', ' ') + '+00'
        alert_last_update = timeformatutils._ISO8601_to_UNIXtime(t)
        alert_trigger_id = d['triggerId']
        alert_met_conds = [
            dict(current_value=c['current_value']['min'],
                 condition=Condition.from_dict(c['condition']))
            for c in d['conditions']]
        alert_coords = d['coordinates']
        return Alert(alert_id, alert_trigger_id, alert_met_conds, alert_coords,
                     last_update=alert_last_update)
    except (ValueError, KeyError) as e:
        raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e)
Parses a pyowm . alertapi30 . alert . Alert instance out of raw JSON data .
17,941
def add(self, data):
    """Prepend a new node holding ``data`` to the front of the list,
    updating the list pointers and size."""
    node = LinkedListNode(data, None)
    if self._size == 0:
        # empty list: the new node is both first and last
        self._first_node = node
        self._last_node = node
    else:
        node.update_next(self._first_node)
        self._first_node = node
    self._size += 1
Adds a new data node to the front of the list. The provided data will be encapsulated into a new instance of the LinkedListNode class, and the linked list's pointers and size will be updated accordingly.
17,942
def remove(self, data):
    """Remove the node holding the first occurrence of ``data``.

    No-op when the list is empty or the data is not found.
    """
    current_node = self._first_node
    deleted = False

    if self._size == 0:
        return

    # case 1: the head node holds the data
    if data == current_node.data():
        if current_node.next() is None:
            # NOTE(review): the now-empty list is reset to a sentinel node
            # holding None rather than to None itself — confirm the rest of
            # the class expects this sentinel.
            self._first_node = LinkedListNode(None, None)
            self._last_node = self._first_node
            self._size = 0
            return
        current_node = current_node.next()
        self._first_node = current_node
        self._size -= 1
        return

    # case 2: walk the list, inspecting each node's successor so the
    # predecessor is at hand when unlinking
    while True:
        if current_node is None:
            deleted = False
            break
        next_node = current_node.next()
        if next_node is not None:
            if data == next_node.data():
                next_next_node = next_node.next()
                current_node.update_next(next_next_node)
                next_node = None
                deleted = True
                break
        current_node = current_node.next()
    if deleted:
        self._size -= 1
Removes a data node from the list. If the list contains more than one node with the same data, the node holding the first occurrence of that data is removed.
17,943
def contains(self, data):
    """Return True when at least one node of the list holds ``data``."""
    return any(item.data() == data for item in self)
Checks if the provided data is stored in at least one node of the list .
17,944
def pop(self):
    """Remove the last node from the list and return its data.

    NOTE(review): walks via ``next().next()`` without None checks, so it
    assumes at least two nodes (or a sentinel head) — confirm callers
    guarantee this; an empty/single-node list would raise AttributeError.
    """
    popped = False
    result = None
    current_node = self._first_node
    while not popped:
        next_node = current_node.next()
        next_next_node = next_node.next()
        if not next_next_node:
            # next_node is the last element: unlink it and keep its data
            self._last_node = current_node
            self._last_node.update_next(None)
            self._size -= 1
            result = next_node.data()
            popped = True
        current_node = next_node
    return result
Removes the last node from the list
17,945
def get_sunset_time(self, timeformat='unix'):
    """Return the GMT time of sunset in the requested format, or None when
    no sunset time is available."""
    if self._sunset_time is None:
        return None
    return timeformatutils.timeformat(self._sunset_time, timeformat)
Returns the GMT time of sunset
17,946
def get_sunrise_time(self, timeformat='unix'):
    """Return the GMT time of sunrise in the requested format, or None when
    no sunrise time is available."""
    if self._sunrise_time is None:
        return None
    return timeformatutils.timeformat(self._sunrise_time, timeformat)
Returns the GMT time of sunrise
17,947
def get_temperature(self, unit='kelvin'):
    """Return a dict of temperature info converted to ``unit``.

    Entries that are None or negative (invalid as Kelvin readings) are
    passed through unconverted.
    """
    convertible = {}
    passthrough = {}
    for label, temp in self._temperature.items():
        if temp is None or temp < 0:
            passthrough[label] = temp
        else:
            convertible[label] = temp
    converted = temputils.kelvin_dict_to(convertible, unit)
    merged = dict(converted)
    merged.update(passthrough)
    return merged
Returns a dict with temperature info
17,948
def creation_time(self, timeformat='unix'):
    """Return the UTC creation time of this station in the requested
    format, or None when unknown."""
    if self.created_at is None:
        return None
    return timeformatutils.timeformat(self.created_at, timeformat)
Returns the UTC time of creation of this station
17,949
def last_update_time(self, timeformat='unix'):
    """Return the UTC time of the last metadata update for this station in
    the requested format, or None when unknown."""
    if self.updated_at is None:
        return None
    return timeformatutils.timeformat(self.updated_at, timeformat)
Returns the UTC time of the last update on this station s metadata
17,950
def bounding_polygon(self):
    """Return the bounding-box Polygon for this tile as a closed ring
    starting and ending at the top-left corner."""
    lon_left, lat_bottom, lon_right, lat_top = Tile.tile_coords_to_bbox(
        self.x, self.y, self.zoom)
    # stray debug print of the bbox removed
    return Polygon([[
        [lon_left, lat_top],
        [lon_right, lat_top],
        [lon_right, lat_bottom],
        [lon_left, lat_bottom],
        [lon_left, lat_top]]])
Returns the bounding box polygon for this tile
17,951
def tile_coords_for_point(cls, geopoint, zoom):
    """Return the (x, y) coordinates of the tile containing ``geopoint`` at
    the specified zoom level, delegating to ``Tile``'s coordinate math."""
    return Tile.geoocoords_to_tile_coords(geopoint.lon, geopoint.lat, zoom)
Returns the coordinates of the tile containing the specified geopoint at the specified zoom level
17,952
def get_tile(self, x, y, zoom):
    """Retrieve the PNG tile at the given coordinates and zoom level,
    wrapped in a Tile object."""
    path = '/%s/%s/%s.png' % (zoom, x, y)
    status, data = self.http_client.get_png(
        ROOT_TILE_URL % self.map_layer + path,
        params={'appid': self.API_key})
    img = Image(data, ImageTypeEnum.PNG)
    return Tile(x, y, zoom, self.map_layer, img)
Retrieves the tile having the specified coordinates and zoom level
17,953
def get_triggers(self):
    """Retrieve all of the user's triggers from the Weather Alert API."""
    status, data = self.http_client.get_json(
        TRIGGERS_URI,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return [self.trigger_parser.parse_dict(payload) for payload in data]
Retrieves all of the user s triggers that are set on the Weather Alert API .
17,954
def get_trigger(self, trigger_id):
    """Retrieve the named trigger from the Weather Alert API.

    :param trigger_id: string id of the trigger to fetch
    """
    assert isinstance(trigger_id, str), "Value must be a string"
    status, data = self.http_client.get_json(
        NAMED_TRIGGER_URI % trigger_id,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return self.trigger_parser.parse_dict(data)
Retrieves the named trigger from the Weather Alert API .
17,955
def delete_trigger(self, trigger):
    """Delete the trigger record identified by the given Trigger's id,
    along with all related alerts."""
    assert trigger is not None
    assert isinstance(trigger.id, str), "Value must be a string"
    status, _ = self.http_client.delete(
        NAMED_TRIGGER_URI % trigger.id,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
Deletes from the Alert API the trigger record identified by the ID of the provided pyowm . alertapi30 . trigger . Trigger along with all related alerts
17,956
def get_stations(self):
    """Retrieve all of the user's stations registered on the Stations API."""
    status, data = self.http_client.get_json(
        STATIONS_URI,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return [self.stations_parser.parse_dict(payload) for payload in data]
Retrieves all of the user s stations registered on the Stations API .
17,957
def get_station(self, id):
    """Retrieve a named station registered on the Stations API.

    :param id: station identifier (parameter name kept for backwards
        compatibility even though it shadows the builtin)
    """
    status, data = self.http_client.get_json(
        NAMED_STATION_URI % str(id),
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return self.stations_parser.parse_dict(data)
Retrieves a named station registered on the Stations API .
17,958
def create_station(self, external_id, name, lat, lon, alt=None):
    """Create a new station on the Station API with the given parameters.

    :raises ValueError: when lat/lon are out of range or alt is negative.
    """
    assert external_id is not None
    assert name is not None
    assert lon is not None
    assert lat is not None
    if not -180.0 <= lon <= 180.0:
        raise ValueError("'lon' value must be between -180 and 180")
    if not -90.0 <= lat <= 90.0:
        raise ValueError("'lat' value must be between -90 and 90")
    if alt is not None and alt < 0.0:
        raise ValueError("'alt' value must not be negative")
    status, payload = self.http_client.post(
        STATIONS_URI,
        params={'appid': self.API_key},
        data=dict(external_id=external_id, name=name, lat=lat, lon=lon, alt=alt),
        headers={'Content-Type': 'application/json'})
    return self.stations_parser.parse_dict(payload)
Create a new station on the Station API with the given parameters
17,959
def send_measurement(self, measurement):
    """Post a single Measurement object's data to the Stations API.

    :param measurement: the measurement to upload (must carry a station ID)
    :returns: `None` on success
    """
    assert measurement is not None
    assert measurement.station_id is not None
    # The API expects a JSON array, even for a single measurement.
    status, _ = self.http_client.post(
        MEASUREMENTS_URI,
        params={'appid': self.API_key},
        data=[self._structure_dict(measurement)],
        headers={'Content-Type': 'application/json'})
Posts the provided Measurement object s data to the Station API .
17,960
def send_measurements(self, list_of_measurements):
    """Post data about a list of Measurement objects to the Stations API.

    The measurements may refer to different station IDs.

    :param list_of_measurements: measurements to upload, each with a station ID
    :returns: `None` on success
    """
    assert list_of_measurements is not None
    assert all(m.station_id is not None for m in list_of_measurements)
    payload = [self._structure_dict(m) for m in list_of_measurements]
    status, _ = self.http_client.post(
        MEASUREMENTS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
Posts data about the provided list of Measurement objects to the Station API . The objects may be related to different station IDs .
17,961
def get_measurements(self, station_id, aggregated_on, from_timestamp,
                     to_timestamp, limit=100):
    """Read measurements of a station recorded in a time window, aggregated
    on minute, hour or day; optionally cap the number of results.

    :param station_id: ID of the station whose measurements are queried
    :param aggregated_on: aggregation granularity ('m', 'h' or 'd')
    :param from_timestamp: UNIX start of the window (must be positive)
    :param to_timestamp: UNIX end of the window (must be positive)
    :param limit: max number of items returned (non-negative int, default 100)
    :returns: list of parsed aggregated measurements
    :raises ValueError: when the window end precedes its start
    """
    assert station_id is not None
    assert aggregated_on is not None
    assert from_timestamp is not None
    assert from_timestamp > 0
    assert to_timestamp is not None
    assert to_timestamp > 0
    if to_timestamp < from_timestamp:
        raise ValueError("End timestamp can't be earlier than begin timestamp")
    assert isinstance(limit, int)
    assert limit >= 0
    query = {
        'appid': self.API_key,
        'station_id': station_id,
        'type': aggregated_on,
        'from': from_timestamp,
        'to': to_timestamp,
        'limit': limit,
    }
    status, data = self.http_client.get_json(
        MEASUREMENTS_URI,
        params=query,
        headers={'Content-Type': 'application/json'})
    return [self.aggregated_measurements_parser.parse_dict(item)
            for item in data]
Reads measurements of a specified station recorded in the specified time window and aggregated on minute hour or day . Optionally the number of resulting measurements can be limited .
17,962
def send_buffer(self, buffer):
    """Post to the Stations API the data of every Measurement held in the
    provided Buffer instance.

    :param buffer: buffer whose `measurements` are to be uploaded
    :returns: `None` on success
    """
    assert buffer is not None
    payload = [self._structure_dict(item) for item in buffer.measurements]
    status, _ = self.http_client.post(
        MEASUREMENTS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
Posts to the Stations API data about the Measurement objects contained into the provided Buffer instance .
17,963
def create_DOM_node_from_dict(d, name, parent_node):
    """Dump dict data into an `xml.etree.ElementTree.SubElement` DOM subtree
    attached to `parent_node` and named `name`.

    When `d` is `None` no node at all is created (and `None` is returned);
    keys mapping to `None` values produce no child node either.

    :param d: the dict to serialize (or `None`)
    :param name: tag name for the subtree root
    :param parent_node: DOM node the subtree is attached to
    :returns: the created subtree root, or `None` when `d` is `None`
    """
    if d is None:
        return None
    subtree_root = ET.SubElement(parent_node, name)
    for key, value in d.items():
        if value is None:
            continue  # skip None values entirely
        child = ET.SubElement(subtree_root, key)
        child.text = str(value)
    return subtree_root
Dumps dict data to an xml . etree . ElementTree . SubElement DOM subtree object and attaches it to the specified DOM parent node . The created subtree object is named after the specified name . If the supplied dict is None no DOM node is created for it as well as no DOM subnodes are generated for eventual None values found inside the dict
17,964
def DOM_node_to_XML(tree, xml_declaration=True):
    """Print a DOM tree to its Unicode XML representation.

    :param tree: the DOM element to serialize
    :param xml_declaration: when `False`, the leading XML declaration is
        stripped from the output
    :returns: the XML as a `str`
    """
    serialized = ET.tostring(tree, encoding='utf8', method='xml').decode('utf-8')
    if not xml_declaration:
        # Drop everything up to (and including) the declaration line.
        serialized = serialized.split("<?xml version='1.0' encoding='utf8'?>\n")[1]
    return serialized
Prints a DOM tree to its Unicode representation .
17,965
def annotate_with_XMLNS(tree, prefix, URI):
    """Annotate a DOM tree with an XMLNS attribute and prepend the XMLNS
    prefix to the tag of every node except the root.

    :param tree: an Element or ElementTree; a non-element gets `.getroot()`ed
    :param prefix: the namespace prefix to apply
    :param URI: the namespace URI bound to the prefix
    """
    if not ET.iselement(tree):
        tree = tree.getroot()
    tree.attrib['xmlns:' + prefix] = URI
    nodes = tree.iter()
    next(nodes)  # skip the root: only descendants get the prefix
    for node in nodes:
        node.tag = prefix + ":" + node.tag
Annotates the provided DOM tree with XMLNS attributes and adds XMLNS prefixes to the tags of the tree nodes .
17,966
def with_img_type(self, image_type):
    """Return the search results having the specified image type.

    :param image_type: an `ImageType` value to filter on
    :returns: list of matching metaimages
    """
    assert isinstance(image_type, ImageType)
    return [mi for mi in self.metaimages if mi.image_type == image_type]
Returns the search results having the specified image type
17,967
def with_preset(self, preset):
    """Return the search results having the specified preset.

    :param preset: preset name (string) to filter on
    :returns: list of matching metaimages
    """
    assert isinstance(preset, str)
    return [mi for mi in self.metaimages if mi.preset == preset]
Returns the search results having the specified preset
17,968
def with_img_type_and_preset(self, image_type, preset):
    """Return the search results having both the specified image type and
    the specified preset.

    :param image_type: an `ImageType` value to filter on
    :param preset: preset name (string) to filter on
    :returns: list of matching metaimages
    """
    assert isinstance(image_type, ImageType)
    assert isinstance(preset, str)
    return [mi for mi in self.metaimages
            if mi.image_type == image_type and mi.preset == preset]
Returns the search results having both the specified image type and preset
17,969
def status_for(self, code):
    """Return the weather status mapped to the given weather status code,
    or `None` when no stored range contains the code.

    :param code: numeric weather status code
    :returns: the matching status key, or `None`
    """
    for status, ranges in self._code_ranges_dict.items():
        for rng in ranges:
            # Inclusive range check against each stored [start, end] span.
            if rng['start'] <= code <= rng['end']:
                return status
    return None
Returns the weather status related to the specified weather status code if any is stored None otherwise .
17,970
def creation_time(self, timeformat='unix'):
    """Return the UTC time of creation of this aggregated measurement.

    :param timeformat: target time representation (defaults to 'unix')
    :returns: the formatted timestamp, or `None` when no timestamp is set
    """
    if self.timestamp is None:
        return None
    return timeformatutils.timeformat(self.timestamp, timeformat)
Returns the UTC time of creation of this aggregated measurement
17,971
def to_dict(self):
    """Dump this aggregated measurement's fields into a dict."""
    fields = ('station_id', 'timestamp', 'aggregated_on', 'temp',
              'humidity', 'wind', 'pressure', 'precipitation')
    # Keys intentionally mirror the attribute names one-to-one.
    return {name: getattr(self, name) for name in fields}
Dumps object fields into a dict
17,972
def to_dict(self):
    """Dump this measurement's fields into a dictionary."""
    fields = (
        'station_id', 'timestamp', 'temperature', 'wind_speed', 'wind_gust',
        'wind_deg', 'pressure', 'humidity', 'rain_1h', 'rain_6h', 'rain_24h',
        'snow_1h', 'snow_6h', 'snow_24h', 'dew_point', 'humidex',
        'heat_index', 'visibility_distance', 'visibility_prefix',
        'clouds_distance', 'clouds_condition', 'clouds_cumulus',
        'weather_precipitation', 'weather_descriptor', 'weather_intensity',
        'weather_proximity', 'weather_obscuration', 'weather_other')
    # Every dict key mirrors the attribute of the same name.
    return {name: getattr(self, name) for name in fields}
Dumps object fields into a dictionary
17,973
def to_geopoint(self):
    """Return the geoJSON-compliant representation of this location.

    :returns: a `geo.Point`, or `None` when either coordinate is missing
    """
    if self._lon is None or self._lat is None:
        return None
    # Note the (lon, lat) argument order used by geo.Point.
    return geo.Point(self._lon, self._lat)
Returns the geoJSON compliant representation of this location
17,974
def get(self, request_url):
    """On a cache hit, return the JSON string holding the OWM API response
    for the request identified by `request_url`, refresh its insertion
    time and promote its recency; return `None` on a miss or when the
    cached item has outlived its lifetime (in which case it is evicted).

    :param request_url: lookup key identifying the cached request
    :returns: the cached JSON data, or `None`
    """
    try:
        entry = self._table[request_url]
        now = timeutils.now('unix')
        if now - entry['insertion_time'] > self._item_lifetime:
            # Expired: drop the item and report a miss.
            self._clean_item(request_url)
            return None
        entry['insertion_time'] = now
        self._promote(request_url)
        return entry['data']
    except:  # noqa: E722 -- bare except kept: any failure means a miss
        return None
In case of a hit returns the JSON string which represents the OWM web API response to the request being identified by a specific string URL and updates the recency of this request .
17,975
def set(self, request_url, response_json):
    """Cache `response_json` under the key `request_url`.

    When the cache is full, the least-recently-used item is evicted first.
    A brand-new key is inserted into the table and appended to the recency
    structure; an existing key only has its insertion time refreshed and
    its recency promoted.

    :param request_url: lookup key for the cached response
    :param response_json: the JSON payload to store
    """
    if self.size() == self._max_size:
        evicted = self._usage_recency.pop()
        del self._table[evicted]
    now = timeutils.now('unix')
    if request_url in self._table:
        self._table[request_url]['insertion_time'] = now
        self._promote(request_url)
    else:
        self._table[request_url] = {'data': response_json,
                                    'insertion_time': now}
        self._usage_recency.add(request_url)
Checks if the maximum size of the cache has been reached and in case discards the least recently used item from usage_recency and table ; then adds the response_json to be cached to the table dict using as a lookup key the request_url of the request that generated the value ; finally adds it at the front of usage_recency
17,976
def _promote ( self , request_url ) : self . _usage_recency . remove ( request_url ) self . _usage_recency . add ( request_url )
Moves the cache item specified by request_url to the front of the usage_recency list
17,977
def _clean_item ( self , request_url ) : del self . _table [ request_url ] self . _usage_recency . remove ( request_url )
Removes the specified item from the cache s datastructures
17,978
def clean(self):
    """Empty the cache: clear the lookup table and remove every entry from
    the usage-recency structure.

    Bug fix: the original iterated `self._usage_recency` while removing
    from it, which skips every other element and leaves stale entries
    behind. Iterating over a snapshot removes them all.
    """
    self._table.clear()
    for item in list(self._usage_recency):
        self._usage_recency.remove(item)
Empties the cache
17,979
def sort_reverse_chronologically(self):
    """Sort this buffer's measurements in place, newest first."""
    self.measurements.sort(key=attrgetter('timestamp'), reverse=True)
Sorts the measurements of this buffer in reverse chronological order
17,980
def surface_temp(self, unit='kelvin'):
    """Return the soil surface temperature in the requested unit.

    :param unit: 'kelvin' (default), 'celsius' or 'fahrenheit'
    :returns: the temperature value
    :raises ValueError: for any other unit string
    """
    if unit == 'kelvin':
        return self._surface_temp
    elif unit == 'celsius':
        return temputils.kelvin_to_celsius(self._surface_temp)
    elif unit == 'fahrenheit':
        return temputils.kelvin_to_fahrenheit(self._surface_temp)
    raise ValueError('Wrong temperature unit')
Returns the soil surface temperature
17,981
def ten_cm_temp(self, unit='kelvin'):
    """Return the soil temperature measured 10 cm below surface, in the
    requested unit.

    :param unit: 'kelvin' (default), 'celsius' or 'fahrenheit'
    :returns: the temperature value
    :raises ValueError: for any other unit string
    """
    if unit == 'kelvin':
        return self._ten_cm_temp
    elif unit == 'celsius':
        return temputils.kelvin_to_celsius(self._ten_cm_temp)
    elif unit == 'fahrenheit':
        return temputils.kelvin_to_fahrenheit(self._ten_cm_temp)
    raise ValueError('Wrong temperature unit')
Returns the soil temperature measured 10 cm below surface
17,982
def assert_is_lat(val):
    """Check that the given value is a feasible decimal latitude.

    :param val: the candidate latitude (int or float)
    :raises AssertionError: when `val` is not numeric
    :raises ValueError: when `val` is outside [-90, 90]
    """
    assert isinstance(val, (float, int)) and not isinstance(val, bool), \
        "Value must be a number"
    if not -90.0 <= val <= 90.0:
        raise ValueError("Latitude value must be between -90 and 90")
Checks if the given value is a feasible decimal latitude
17,983
def assert_is_lon(val):
    """Check that the given value is a feasible decimal longitude.

    :param val: the candidate longitude (int or float)
    :raises AssertionError: when `val` is not numeric
    :raises ValueError: when `val` is outside [-180, 180]
    """
    assert isinstance(val, (float, int)) and not isinstance(val, bool), \
        "Value must be a number"
    if not -180.0 <= val <= 180.0:
        raise ValueError("Longitude value must be between -180 and 180")
Checks if the given value is a feasible decimal longitude
17,984
def create_polygon(self, geopolygon, name=None):
    """Create a new polygon on the Agro API.

    :param geopolygon: mandatory `GeoPolygon` describing the geometry
    :param name: optional mnemonic name for the polygon
    :returns: the `Polygon` echoed back by the API
    """
    assert geopolygon is not None
    assert isinstance(geopolygon, GeoPolygon)
    # Wrap the geometry in a geoJSON Feature, as the Agro API expects.
    payload = {
        'geo_json': {
            "type": "Feature",
            "properties": {},
            "geometry": geopolygon.as_dict(),
        }
    }
    if name is not None:
        payload['name'] = name
    status, data = self.http_client.post(
        POLYGONS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
    return Polygon.from_dict(data)
Create a new polygon on the Agro API with the given parameters
17,985
def get_polygons(self):
    """Retrieve all of the user's polygons registered on the Agro API.

    :returns: list of `Polygon` objects
    """
    status, data = self.http_client.get_json(
        POLYGONS_URI,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return [Polygon.from_dict(record) for record in data]
Retrieves all of the user s polygons registered on the Agro API .
17,986
def get_polygon(self, polygon_id):
    """Retrieve the polygon with the given ID from the Agro API.

    :param polygon_id: the polygon identifier (coerced to string)
    :returns: the matching `Polygon` object
    """
    status, data = self.http_client.get_json(
        NAMED_POLYGON_URI % str(polygon_id),
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return Polygon.from_dict(data)
Retrieves a named polygon registered on the Agro API .
17,987
def update_polygon(self, polygon):
    """Update on the Agro API the polygon identified by the given
    `Polygon`'s ID. Currently only the mnemonic name is updated remotely.

    :param polygon: polygon carrying the ID and the new name
    :returns: `None` on success
    """
    assert polygon.id is not None
    status, _ = self.http_client.put(
        NAMED_POLYGON_URI % str(polygon.id),
        params={'appid': self.API_key},
        data=dict(name=polygon.name),
        headers={'Content-Type': 'application/json'})
Updates on the Agro API the Polygon identified by the ID of the provided polygon object . Currently this only changes the mnemonic name of the remote polygon
17,988
def delete_polygon(self, polygon):
    """Delete on the Agro API the polygon identified by the given
    `Polygon`'s ID.

    :param polygon: polygon carrying the ID to be deleted
    :returns: `None` on success
    """
    assert polygon.id is not None
    status, _ = self.http_client.delete(
        NAMED_POLYGON_URI % str(polygon.id),
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
Deletes on the Agro API the Polygon identified by the ID of the provided polygon object .
17,989
def soil_data(self, polygon):
    """Retrieve the latest soil data for the specified polygon.

    :param polygon: mandatory `Polygon` identifying the area of interest
    :returns: a `Soil` object built from the API payload
    """
    assert polygon is not None
    assert isinstance(polygon, Polygon)
    poly_id = polygon.id
    status, data = self.http_client.get_json(
        SOIL_URI,
        params={'appid': self.API_key, 'polyid': poly_id},
        headers={'Content-Type': 'application/json'})
    # Translate the API's terse field names into Soil's expected keys.
    return Soil.from_dict({
        'reference_time': data['dt'],
        'surface_temp': data['t0'],
        'ten_cm_temp': data['t10'],
        'moisture': data['moisture'],
        'polygon_id': poly_id,
    })
Retrieves the latest soil data on the specified polygon
17,990
def stats_for_satellite_image(self, metaimage):
    """Retrieve statistics for the satellite image described by the given
    metadata. Only EVI and NDVI presets are currently supported.

    :param metaimage: metadata whose `preset` and `stats_url` are used
    :returns: the statistics payload (parsed JSON)
    :raises ValueError: on unsupported preset or missing stats URL
    """
    if metaimage.preset not in (PresetEnum.EVI, PresetEnum.NDVI):
        raise ValueError("Unsupported image preset: should be EVI or NDVI")
    if metaimage.stats_url is None:
        raise ValueError("URL for image statistics is not defined")
    status, data = self.http_client.get_json(metaimage.stats_url, params={})
    return data
Retrieves statistics for the satellite image described by the provided metadata . This is currently only supported for the EVI and NDVI presets
17,991
def is_API_online(self):
    """Return `True` if the OWM Weather API is currently reachable.

    Availability is probed with a lightweight observation request; a
    timeout of that call is interpreted as the service being offline.

    :returns: bool
    """
    probe_params = {'q': 'London,GB'}
    uri = http_client.HttpClient.to_url(OBSERVATION_URL,
                                        self._API_key,
                                        self._subscription_type)
    try:
        self._wapi.cacheable_get_json(uri, params=probe_params)
        return True
    except api_call_error.APICallTimeoutError:
        return False
Returns True if the OWM Weather API is currently online . A short timeout is used to determine API service availability .
17,992
def _retrieve_station_history(self, station_ID, limit, interval):
    """Helper for the station_X_history functions: fetch and parse the
    weather history of a meteostation.

    :param station_ID: ID of the meteostation to query
    :param limit: optional cap on the number of history items (`None` = all)
    :param interval: sampling granularity requested from the API
    :returns: the parsed station history (with ID and interval set), or
        `None` when parsing yields nothing
    """
    params = {'id': station_ID, 'type': interval, 'lang': self._language}
    if limit is not None:
        params['cnt'] = limit
    uri = http_client.HttpClient.to_url(STATION_WEATHER_HISTORY_URL,
                                        self._API_key,
                                        self._subscription_type,
                                        self._use_ssl)
    _, json_data = self._wapi.cacheable_get_json(uri, params=params)
    history = self._parsers['station_history'].parse_JSON(json_data)
    if history is not None:
        history.set_station_ID(station_ID)
        history.set_interval(interval)
    return history
Helper method for station_X_history functions .
17,993
def uvindex_forecast_around_coords(self, lat, lon):
    """Query the OWM Weather API for forecast UV index values over the
    next 8 days around the provided geo-coordinates.

    :param lat: latitude, must lie in [-90, 90]
    :param lon: longitude, must lie in [-180, 180]
    :returns: list of parsed UV index objects
    """
    geo.assert_is_lon(lon)
    geo.assert_is_lat(lat)
    json_data = self._uvapi.get_uvi_forecast({'lon': lon, 'lat': lat})
    return self._parsers['uvindex_list'].parse_JSON(json_data)
Queries the OWM Weather API for forecast Ultra Violet values in the next 8 days in the surroundings of the provided geocoordinates .
17,994
def uvindex_history_around_coords(self, lat, lon, start, end=None):
    """Query the OWM Weather API for historical UV index values around the
    provided geo-coordinates within the given time frame. When `end` is
    omitted the current datetime is used.

    :param lat: latitude, must lie in [-90, 90]
    :param lon: longitude, must lie in [-180, 180]
    :param start: mandatory start of the time frame (any supported format)
    :param end: optional end of the time frame (defaults to now)
    :returns: list of parsed UV index objects
    """
    geo.assert_is_lon(lon)
    geo.assert_is_lat(lat)
    assert start is not None
    start_unix = timeformatutils.timeformat(start, 'unix')
    if end is None:
        end_unix = timeutils.now(timeformat='unix')
    else:
        end_unix = timeformatutils.timeformat(end, 'unix')
    params = {'lon': lon, 'lat': lat, 'start': start_unix, 'end': end_unix}
    json_data = self._uvapi.get_uvi_history(params)
    return self._parsers['uvindex_list'].parse_JSON(json_data)
Queries the OWM Weather API for UV index historical values in the surroundings of the provided geocoordinates and in the specified time frame . If the end of the time frame is not provided that is intended to be the current datetime .
17,995
def kelvin_dict_to(d, target_temperature_unit):
    """Convert every value of a dict of Kelvin temperatures to the
    specified temperature unit.

    :param d: dict mapping keys to Kelvin temperature values
    :param target_temperature_unit: 'kelvin', 'celsius' or 'fahrenheit'
    :returns: the original dict for 'kelvin', otherwise a new converted dict
    :raises ValueError: for any other unit string
    """
    if target_temperature_unit == 'kelvin':
        return d  # no conversion needed: hand back the same dict
    elif target_temperature_unit == 'celsius':
        return {k: kelvin_to_celsius(v) for k, v in d.items()}
    elif target_temperature_unit == 'fahrenheit':
        return {k: kelvin_to_fahrenheit(v) for k, v in d.items()}
    raise ValueError("Invalid value for target temperature conversion unit")
Converts all the values in a dict from Kelvin temperatures to the specified temperature format .
17,996
def kelvin_to_celsius(kelvintemp):
    """Convert a numeric temperature from Kelvin to Celsius degrees,
    rounded to two decimal places.

    :param kelvintemp: temperature in Kelvin (must be non-negative)
    :returns: the Celsius temperature as a float
    :raises ValueError: when `kelvintemp` is negative
    """
    if kelvintemp < 0:
        raise ValueError(__name__ +
                         ": negative temperature values not allowed")
    # Round to 2 decimals via formatting, matching the library convention.
    return float("{0:.2f}".format(kelvintemp - KELVIN_OFFSET))
Converts a numeric temperature from Kelvin degrees to Celsius degrees
17,997
def kelvin_to_fahrenheit(kelvintemp):
    """Convert a numeric temperature from Kelvin to Fahrenheit degrees,
    rounded to two decimal places.

    :param kelvintemp: temperature in Kelvin (must be non-negative)
    :returns: the Fahrenheit temperature as a float
    :raises ValueError: when `kelvintemp` is negative
    """
    if kelvintemp < 0:
        raise ValueError(__name__ +
                         ": negative temperature values not allowed")
    converted = (kelvintemp - KELVIN_OFFSET) * FAHRENHEIT_DEGREE_SCALE \
        + FAHRENHEIT_OFFSET
    # Round to 2 decimals via formatting, matching the library convention.
    return float("{0:.2f}".format(converted))
Converts a numeric temperature from Kelvin degrees to Fahrenheit degrees
17,998
def id_for(self, city_name):
    """Return the long ID of the first city matching the provided name
    (case-insensitive lookup), or `None` when no city matches.

    :param city_name: the city name to look up
    :returns: int ID or `None`
    """
    record = self._lookup_line_by_city_name(city_name)
    if record is None:
        return None
    # The ID sits in the second comma-separated field of the registry line.
    return int(record.split(",")[1])
Returns the long ID corresponding to the first city found that matches the provided city name . The lookup is case insensitive .
17,999
def find_closest_weather(weathers_list, unixtime):
    """Extract from the provided list of Weather objects the item closest
    in time to the provided UNIX time.

    :param weathers_list: list of Weather objects (may be empty)
    :param unixtime: the reference UNIX timestamp
    :returns: the closest Weather, or `None` for an empty/falsy list
    :raises NotFoundError: when `unixtime` lies outside the list's coverage
    """
    if not weathers_list:
        return None
    if not is_in_coverage(unixtime, weathers_list):
        raise api_response_error.NotFoundError(
            'Error: the specified time is ' +
            'not included in the weather coverage range')
    best = weathers_list[0]
    best_distance = abs(best.get_reference_time() - unixtime)
    for candidate in weathers_list:
        distance = abs(candidate.get_reference_time() - unixtime)
        if distance < best_distance:
            best_distance = distance
            best = candidate
    return best
Extracts from the provided list of Weather objects the item which is closest in time to the provided UNIXtime .