idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
48,600 | def datetimes ( self ) : if self . _datetimes is None : self . _datetimes = tuple ( self . header . analysis_period . datetimes ) return self . _datetimes | Return datetimes for this collection as a tuple . |
48,601 | def interpolate_to_timestep ( self , timestep , cumulative = None ) : assert timestep % self . header . analysis_period . timestep == 0 , 'Target timestep({}) must be divisable by current timestep({})' . format ( timestep , self . header . analysis_period . timestep ) if cumulative is not None : assert isinstance ( cumulative , bool ) , 'Expected Boolean. Got {}' . format ( type ( cumulative ) ) _new_values = [ ] _data_length = len ( self . _values ) for d in xrange ( _data_length ) : for _v in self . _xxrange ( self [ d ] , self [ ( d + 1 ) % _data_length ] , timestep ) : _new_values . append ( _v ) native_cumulative = self . header . data_type . cumulative if cumulative is True or ( cumulative is None and native_cumulative ) : for i , d in enumerate ( _new_values ) : _new_values [ i ] = d / timestep if self . header . data_type . point_in_time is False : shift_dist = int ( timestep / 2 ) _new_values = _new_values [ - shift_dist : ] + _new_values [ : - shift_dist ] a_per = self . header . analysis_period _new_a_per = AnalysisPeriod ( a_per . st_month , a_per . st_day , a_per . st_hour , a_per . end_month , a_per . end_day , a_per . end_hour , timestep , a_per . is_leap_year ) _new_header = self . header . duplicate ( ) _new_header . _analysis_period = _new_a_per return HourlyContinuousCollection ( _new_header , _new_values ) | Interpolate data for a finer timestep using a linear interpolation . |
48,602 | def filter_by_hoys ( self , hoys ) : existing_hoys = self . header . analysis_period . hoys hoys = [ h for h in hoys if h in existing_hoys ] _moys = tuple ( int ( hour * 60 ) for hour in hoys ) return self . filter_by_moys ( _moys ) | Filter the Data Collection based on a list of hoys . |
48,603 | def to_immutable ( self ) : if self . _enumeration is None : self . _get_mutable_enumeration ( ) col_obj = self . _enumeration [ 'immutable' ] [ self . _collection_type ] return col_obj ( self . header , self . values ) | Get an immutable version of this collection . |
48,604 | def to_discontinuous ( self ) : collection = HourlyDiscontinuousCollection ( self . header . duplicate ( ) , self . values , self . datetimes ) collection . _validated_a_period = True return collection | Return a discontinuous version of the current collection . |
48,605 | def _get_analysis_period_subset ( self , a_per ) : if self . header . analysis_period . is_annual : return a_per new_needed = False n_ap = [ a_per . st_month , a_per . st_day , a_per . st_hour , a_per . end_month , a_per . end_day , a_per . end_hour , a_per . timestep , a_per . is_leap_year ] if a_per . st_hour < self . header . analysis_period . st_hour : n_ap [ 2 ] = self . header . analysis_period . st_hour new_needed = True if a_per . end_hour > self . header . analysis_period . end_hour : n_ap [ 5 ] = self . header . analysis_period . end_hour new_needed = True if a_per . st_time . doy < self . header . analysis_period . st_time . doy : n_ap [ 0 ] = self . header . analysis_period . st_month n_ap [ 1 ] = self . header . analysis_period . st_day new_needed = True if a_per . end_time . doy > self . header . analysis_period . end_time . doy : n_ap [ 3 ] = self . header . analysis_period . end_month n_ap [ 4 ] = self . header . analysis_period . end_day new_needed = True if new_needed is False : return a_per else : return AnalysisPeriod ( * n_ap ) | Return an analysis_period that is always a subset of the Data Collection |
48,606 | def _monthly_operation ( self , operation , percentile = 0 ) : if operation == 'average' : funct = self . _average elif operation == 'total' : funct = self . _total else : assert 0 <= percentile <= 100 , 'percentile must be between 0 and 100. Got {}' . format ( percentile ) funct = self . _get_percentile_function ( percentile ) data_dict = self . group_by_month ( ) new_data , d_times = [ ] , [ ] for i in self . header . analysis_period . months_int : vals = data_dict [ i ] if vals != [ ] : new_data . append ( funct ( vals ) ) d_times . append ( i ) new_header = self . header . duplicate ( ) if operation == 'percentile' : new_header . metadata [ 'operation' ] = '{} percentile' . format ( percentile ) else : new_header . metadata [ 'operation' ] = operation collection = MonthlyCollection ( new_header , new_data , d_times ) collection . _validated_a_period = True return collection | Get a MonthlyCollection given a certain operation . |
48,607 | def to_unit ( self , values , unit , from_unit ) : return self . _to_unit_base ( 'degC-days' , values , unit , from_unit ) | Return values converted to the unit given the input from_unit . |
48,608 | def bisect ( a , b , fn , epsilon , target ) : while ( abs ( b - a ) > 2 * epsilon ) : midpoint = ( b + a ) / 2 a_t = fn ( a ) b_t = fn ( b ) midpoint_t = fn ( midpoint ) if ( a_t - target ) * ( midpoint_t - target ) < 0 : b = midpoint elif ( b_t - target ) * ( midpoint_t - target ) < 0 : a = midpoint else : return - 999 return midpoint | The simplest root - finding algorithm . |
48,609 | def from_json ( cls , data ) : optional_keys = ( 'city' , 'state' , 'country' , 'latitude' , 'longitude' , 'time_zone' , 'elevation' , 'station_id' , 'source' ) for key in optional_keys : if key not in data : data [ key ] = None return cls ( data [ 'city' ] , data [ 'state' ] , data [ 'country' ] , data [ 'latitude' ] , data [ 'longitude' ] , data [ 'time_zone' ] , data [ 'elevation' ] , data [ 'station_id' ] , data [ 'source' ] ) | Create a location from a dictionary . |
48,610 | def from_location ( cls , location ) : if not location : return cls ( ) try : if hasattr ( location , 'isLocation' ) : return location elif hasattr ( location , 'Latitude' ) : return cls ( city = str ( location . Name . replace ( "," , " " ) ) , latitude = location . Latitude , longitude = location . Longitude ) elif location . startswith ( 'Site:' ) : loc , city , latitude , longitude , time_zone , elevation = [ x . strip ( ) for x in re . findall ( r'\r*\n*([^\r\n]*)[,|;]' , location , re . DOTALL ) ] else : try : city , latitude , longitude , time_zone , elevation = [ key . split ( ":" ) [ - 1 ] . strip ( ) for key in location . split ( "," ) ] except ValueError : return cls ( city = location ) return cls ( city = city , country = None , latitude = latitude , longitude = longitude , time_zone = time_zone , elevation = elevation ) except Exception as e : raise ValueError ( "Failed to create a Location from %s!\n%s" % ( location , e ) ) | Try to create a Ladybug location from a location string . |
48,611 | def duplicate ( self ) : return Location ( self . city , self . state , self . country , self . latitude , self . longitude , self . time_zone , self . elevation , self . station_id , self . source ) | Duplicate location . |
48,612 | def ep_style_location_string ( self ) : return "Site:Location,\n " + self . city + ',\n ' + str ( self . latitude ) + ', !Latitude\n ' + str ( self . longitude ) + ', !Longitude\n ' + str ( self . time_zone ) + ', !Time Zone\n ' + str ( self . elevation ) + '; !Elevation' | Return EnergyPlus s location string . |
48,613 | def from_missing_values ( cls , is_leap_year = False ) : epw_obj = cls ( None ) epw_obj . _is_leap_year = is_leap_year epw_obj . _location = Location ( ) analysis_period = AnalysisPeriod ( is_leap_year = is_leap_year ) headers = [ ] for field_number in xrange ( epw_obj . _num_of_fields ) : field = EPWFields . field_by_number ( field_number ) header = Header ( data_type = field . name , unit = field . unit , analysis_period = analysis_period ) headers . append ( header ) epw_obj . _data . append ( [ ] ) uncertainty = '?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9?9*9*9?9?9?9' for dt in analysis_period . datetimes : hr = dt . hour if dt . hour != 0 else 24 epw_obj . _data [ 0 ] . append ( dt . year ) epw_obj . _data [ 1 ] . append ( dt . month ) epw_obj . _data [ 2 ] . append ( dt . day ) epw_obj . _data [ 3 ] . append ( hr ) epw_obj . _data [ 4 ] . append ( 0 ) epw_obj . _data [ 5 ] . append ( uncertainty ) calc_length = len ( analysis_period . datetimes ) for field_number in xrange ( 6 , epw_obj . _num_of_fields ) : field = EPWFields . field_by_number ( field_number ) mis_val = field . missing if field . missing is not None else 0 for dt in xrange ( calc_length ) : epw_obj . _data [ field_number ] . append ( mis_val ) for i in xrange ( epw_obj . _num_of_fields ) : epw_obj . _data [ i ] = HourlyContinuousCollection ( headers [ i ] , epw_obj . _data [ i ] ) epw_obj . _is_header_loaded = True epw_obj . _is_data_loaded = True return epw_obj | Initalize an EPW object with all data missing or empty . |
48,614 | def from_json ( cls , data ) : epw_obj = cls ( None ) epw_obj . _is_header_loaded = True epw_obj . _is_data_loaded = True required_keys = ( 'location' , 'data_collections' ) option_keys_dict = ( 'metadata' , 'heating_dict' , 'cooling_dict' , 'extremes_dict' , 'extreme_hot_weeks' , 'extreme_cold_weeks' , 'typical_weeks' , 'monthly_ground_temps' ) for key in required_keys : assert key in data , 'Required key "{}" is missing!' . format ( key ) assert len ( data [ 'data_collections' ] ) == epw_obj . _num_of_fields , 'The number of data_collections must be {}. Got {}.' . format ( epw_obj . _num_of_fields , len ( data [ 'data_collections' ] ) ) for key in option_keys_dict : if key not in data : data [ key ] = { } epw_obj . _location = Location . from_json ( data [ 'location' ] ) epw_obj . _data = [ HourlyContinuousCollection . from_json ( dc ) for dc in data [ 'data_collections' ] ] if 'is_leap_year' in data : epw_obj . _is_leap_year = data [ 'is_leap_year' ] if 'is_ip' in data : epw_obj . _is_ip = data [ 'is_ip' ] for dc in epw_obj . _data : assert isinstance ( dc , HourlyContinuousCollection ) , 'data_collections must ' 'be of HourlyContinuousCollection schema. Got {}' . format ( type ( dc ) ) assert dc . header . analysis_period . is_annual , 'data_collections ' 'analysis_period must be annual.' assert dc . header . analysis_period . is_leap_year == epw_obj . _is_leap_year , 'data_collections is_leap_year is not aligned with that of the EPW.' epw_obj . _metadata = data [ 'metadata' ] epw_obj . heating_design_condition_dictionary = data [ 'heating_dict' ] epw_obj . cooling_design_condition_dictionary = data [ 'cooling_dict' ] epw_obj . extreme_design_condition_dictionary = data [ 'extremes_dict' ] def _dejson ( parent_dict , obj ) : new_dict = { } for key , val in parent_dict . items ( ) : new_dict [ key ] = obj . from_json ( val ) return new_dict epw_obj . extreme_hot_weeks = _dejson ( data [ 'extreme_hot_weeks' ] , AnalysisPeriod ) epw_obj . 
extreme_cold_weeks = _dejson ( data [ 'extreme_cold_weeks' ] , AnalysisPeriod ) epw_obj . typical_weeks = _dejson ( data [ 'typical_weeks' ] , AnalysisPeriod ) epw_obj . monthly_ground_temperature = _dejson ( data [ 'monthly_ground_temps' ] , MonthlyCollection ) if 'daylight_savings_start' in data : epw_obj . daylight_savings_start = data [ 'daylight_savings_start' ] if 'daylight_savings_end' in data : epw_obj . daylight_savings_end = data [ 'daylight_savings_end' ] if 'comments_1' in data : epw_obj . comments_1 = data [ 'comments_1' ] if 'comments_2' in data : epw_obj . comments_2 = data [ 'comments_2' ] return epw_obj | Create EPW from json dictionary . |
48,615 | def annual_cooling_design_day_010 ( self ) : self . _load_header_check ( ) if bool ( self . _cooling_dict ) is True : avg_press = self . atmospheric_station_pressure . average avg_press = None if avg_press == 999999 else avg_press return DesignDay . from_ashrae_dict_cooling ( self . _cooling_dict , self . location , True , avg_press ) else : return None | A design day object representing the annual 1 . 0% cooling design day . |
48,616 | def _des_dict_check ( self , des_dict , req_keys , cond_name ) : assert isinstance ( des_dict , dict ) , '{}' ' must be a dictionary. Got {}.' . format ( cond_name , type ( des_dict ) ) if bool ( des_dict ) is True : input_keys = list ( des_dict . keys ( ) ) for key in req_keys : assert key in input_keys , 'Required key "{}" was not found in ' '{}' . format ( key , cond_name ) | Check if an input design condition dictionary is acceptable . |
48,617 | def _format_week ( self , name , type , a_per ) : return '{},{},{}/{},{}/{}' . format ( name , type , a_per . st_month , a_per . st_day , a_per . end_month , a_per . end_day ) | Format an AnalysisPeriod into string for the EPW header . |
48,618 | def _format_grndt ( self , data_c ) : monthly_str = '{},{},{},{}' . format ( data_c . header . metadata [ 'soil conductivity' ] , data_c . header . metadata [ 'soil density' ] , data_c . header . metadata [ 'soil specific heat' ] , ',' . join ( [ '%.2f' % x for x in data_c . values ] ) ) return monthly_str | Format monthly ground data collection into string for the EPW header . |
48,619 | def save ( self , file_path ) : if not self . is_data_loaded : self . _import_data ( ) originally_ip = False if self . is_ip is True : self . convert_to_si ( ) originally_ip = True lines = self . header try : for field in xrange ( 0 , self . _num_of_fields ) : point_in_time = self . _data [ field ] . header . data_type . point_in_time if point_in_time is True : first_hour = self . _data [ field ] . _values . pop ( 0 ) self . _data [ field ] . _values . append ( first_hour ) annual_a_per = AnalysisPeriod ( is_leap_year = self . is_leap_year ) for hour in xrange ( 0 , len ( annual_a_per . datetimes ) ) : line = [ ] for field in xrange ( 0 , self . _num_of_fields ) : line . append ( str ( self . _data [ field ] . _values [ hour ] ) ) lines . append ( "," . join ( line ) + "\n" ) except IndexError : length_error_msg = 'Data length is not for a full year and cannot be ' + 'saved as an EPW file.' raise ValueError ( length_error_msg ) else : file_data = '' . join ( lines ) write_to_file ( file_path , file_data , True ) finally : del ( lines ) for field in xrange ( 0 , self . _num_of_fields ) : point_in_time = self . _data [ field ] . header . data_type . point_in_time if point_in_time is True : last_hour = self . _data [ field ] . _values . pop ( ) self . _data [ field ] . _values . insert ( 0 , last_hour ) if originally_ip is True : self . convert_to_ip ( ) return file_path | Save epw object as an epw file . |
48,620 | def convert_to_ip ( self ) : if not self . is_data_loaded : self . _import_data ( ) if self . is_ip is False : for coll in self . _data : coll . convert_to_ip ( ) self . _is_ip = True | Convert all Data Collections of this EPW object to IP units . |
48,621 | def _get_data_by_field ( self , field_number ) : if not self . is_data_loaded : self . _import_data ( ) if not 0 <= field_number < self . _num_of_fields : raise ValueError ( "Field number should be between 0-%d" % self . _num_of_fields ) return self . _data [ field_number ] | Return a data field by field number . |
48,622 | def sky_temperature ( self ) : sky_temp_header = Header ( data_type = temperature . SkyTemperature ( ) , unit = 'C' , analysis_period = AnalysisPeriod ( ) , metadata = self . _metadata ) horiz_ir = self . _get_data_by_field ( 12 ) . values sky_temp_data = [ calc_sky_temperature ( hir ) for hir in horiz_ir ] return HourlyContinuousCollection ( sky_temp_header , sky_temp_data ) | Return annual Sky Temperature as a Ladybug Data Collection . |
48,623 | def to_wea ( self , file_path , hoys = None ) : hoys = hoys or xrange ( len ( self . direct_normal_radiation . datetimes ) ) if not file_path . lower ( ) . endswith ( '.wea' ) : file_path += '.wea' originally_ip = False if self . is_ip is True : self . convert_to_si ( ) originally_ip = True lines = [ self . _get_wea_header ( ) ] datetimes = self . direct_normal_radiation . datetimes for hoy in hoys : dir_rad = self . direct_normal_radiation [ hoy ] dif_rad = self . diffuse_horizontal_radiation [ hoy ] line = "%d %d %.3f %d %d\n" % ( datetimes [ hoy ] . month , datetimes [ hoy ] . day , datetimes [ hoy ] . hour + 0.5 , dir_rad , dif_rad ) lines . append ( line ) file_data = '' . join ( lines ) write_to_file ( file_path , file_data , True ) if originally_ip is True : self . convert_to_ip ( ) return file_path | Write an wea file from the epw file . |
48,624 | def to_json ( self ) : if not self . is_data_loaded : self . _import_data ( ) def jsonify_dict ( base_dict ) : new_dict = { } for key , val in base_dict . items ( ) : new_dict [ key ] = val . to_json ( ) return new_dict hot_wks = jsonify_dict ( self . extreme_hot_weeks ) cold_wks = jsonify_dict ( self . extreme_cold_weeks ) typ_wks = jsonify_dict ( self . typical_weeks ) grnd_temps = jsonify_dict ( self . monthly_ground_temperature ) return { 'location' : self . location . to_json ( ) , 'data_collections' : [ dc . to_json ( ) for dc in self . _data ] , 'metadata' : self . metadata , 'heating_dict' : self . heating_design_condition_dictionary , 'cooling_dict' : self . cooling_design_condition_dictionary , 'extremes_dict' : self . extreme_design_condition_dictionary , 'extreme_hot_weeks' : hot_wks , 'extreme_cold_weeks' : cold_wks , 'typical_weeks' : typ_wks , "monthly_ground_temps" : grnd_temps , "is_ip" : self . _is_ip , "is_leap_year" : self . is_leap_year , "daylight_savings_start" : self . daylight_savings_start , "daylight_savings_end" : self . daylight_savings_end , "comments_1" : self . comments_1 , "comments_2" : self . comments_2 } | Convert the EPW to a dictionary . |
48,625 | def from_analysis_period ( cls , analysis_period = None ) : if not analysis_period : return cls ( ) elif hasattr ( analysis_period , 'isAnalysisPeriod' ) : return analysis_period elif isinstance ( analysis_period , str ) : try : return cls . from_string ( analysis_period ) except Exception as e : raise ValueError ( "{} is not convertable to an AnalysisPeriod: {}" . format ( analysis_period , e ) ) | Create an AnalysisPeriod from an analysis period . |
48,626 | def from_string ( cls , analysis_period_string ) : is_leap_year = True if analysis_period_string . strip ( ) [ - 1 ] == '*' else False ap = analysis_period_string . lower ( ) . replace ( ' ' , '' ) . replace ( 'to' , ' ' ) . replace ( 'and' , ' ' ) . replace ( '/' , ' ' ) . replace ( 'between' , ' ' ) . replace ( '@' , ' ' ) . replace ( '*' , '' ) try : st_month , st_day , end_month , end_day , st_hour , end_hour , timestep = ap . split ( ' ' ) return cls ( st_month , st_day , st_hour , end_month , end_day , end_hour , int ( timestep ) , is_leap_year ) except Exception as e : raise ValueError ( str ( e ) ) | Create an Analysis Period object from an analysis period string . |
48,627 | def datetimes ( self ) : if self . _timestamps_data is None : self . _calculate_timestamps ( ) return tuple ( DateTime . from_moy ( moy , self . is_leap_year ) for moy in self . _timestamps_data ) | A sorted list of datetimes in this analysis period . |
48,628 | def hoys ( self ) : if self . _timestamps_data is None : self . _calculate_timestamps ( ) return tuple ( moy / 60.0 for moy in self . _timestamps_data ) | A sorted list of hours of year in this analysis period . |
48,629 | def hoys_int ( self ) : if self . _timestamps_data is None : self . _calculate_timestamps ( ) return tuple ( int ( moy / 60.0 ) for moy in self . _timestamps_data ) | A sorted list of hours of year in this analysis period as integers . |
48,630 | def doys_int ( self ) : if not self . _is_reversed : return self . _calc_daystamps ( self . st_time , self . end_time ) else : doys_st = self . _calc_daystamps ( self . st_time , DateTime . from_hoy ( 8759 ) ) doys_end = self . _calc_daystamps ( DateTime . from_hoy ( 0 ) , self . end_time ) return doys_st + doys_end | A sorted list of days of the year in this analysis period as integers . |
48,631 | def months_int ( self ) : if not self . _is_reversed : return list ( xrange ( self . st_time . month , self . end_time . month + 1 ) ) else : months_st = list ( xrange ( self . st_time . month , 13 ) ) months_end = list ( xrange ( 1 , self . end_time . month + 1 ) ) return months_st + months_end | A sorted list of months of the year in this analysis period as integers . |
48,632 | def months_per_hour ( self ) : month_hour = [ ] hour_range = xrange ( self . st_hour , self . end_hour + 1 ) for month in self . months_int : month_hour . extend ( [ ( month , hr ) for hr in hour_range ] ) return month_hour | A list of tuples representing months per hour in this analysis period . |
48,633 | def is_annual ( self ) : if ( self . st_month , self . st_day , self . st_hour , self . end_month , self . end_day , self . end_hour ) == ( 1 , 1 , 0 , 12 , 31 , 23 ) : return True else : return False | Check if an analysis period is annual . |
48,634 | def is_possible_hour ( self , hour ) : if hour > 23 and self . is_possible_hour ( 0 ) : hour = int ( hour ) if not self . _is_overnight : return self . st_time . hour <= hour <= self . end_time . hour else : return self . st_time . hour <= hour <= 23 or 0 <= hour <= self . end_time . hour | Check if a float hour is a possible hour for this analysis period . |
48,635 | def is_time_included ( self , time ) : if self . _timestamps_data is None : self . _calculate_timestamps ( ) return time . moy in self . _timestamps_data | Check if time is included in analysis period . |
48,636 | def duplicate ( self ) : return AnalysisPeriod ( self . st_month , self . st_day , self . st_hour , self . end_month , self . end_day , self . end_hour , self . timestep , self . is_leap_year ) | Return a copy of the analysis period . |
48,637 | def to_json ( self ) : return { 'st_month' : self . st_month , 'st_day' : self . st_day , 'st_hour' : self . st_hour , 'end_month' : self . end_month , 'end_day' : self . end_day , 'end_hour' : self . end_hour , 'timestep' : self . timestep , 'is_leap_year' : self . is_leap_year } | Convert the analysis period to a dictionary . |
48,638 | def _calc_timestamps ( self , st_time , end_time ) : curr = datetime ( st_time . year , st_time . month , st_time . day , st_time . hour , st_time . minute , self . is_leap_year ) end_time = datetime ( end_time . year , end_time . month , end_time . day , end_time . hour , end_time . minute , self . is_leap_year ) while curr <= end_time : if self . is_possible_hour ( curr . hour + ( curr . minute / 60.0 ) ) : time = DateTime ( curr . month , curr . day , curr . hour , curr . minute , self . is_leap_year ) self . _timestamps_data . append ( time . moy ) curr += self . minute_intervals if self . timestep != 1 and curr . hour == 23 and self . is_possible_hour ( 0 ) : curr = end_time for i in list ( xrange ( self . timestep ) ) [ 1 : ] : curr += self . minute_intervals time = DateTime ( curr . month , curr . day , curr . hour , curr . minute , self . is_leap_year ) self . _timestamps_data . append ( time . moy ) | Calculate timesteps between start time and end time . |
48,639 | def _calculate_timestamps ( self ) : self . _timestamps_data = [ ] if not self . _is_reversed : self . _calc_timestamps ( self . st_time , self . end_time ) else : self . _calc_timestamps ( self . st_time , DateTime . from_hoy ( 8759 ) ) self . _calc_timestamps ( DateTime . from_hoy ( 0 ) , self . end_time ) | Return a list of Ladybug DateTime in this analysis period . |
48,640 | def _calc_daystamps ( self , st_time , end_time ) : start_doy = sum ( self . _num_of_days_each_month [ : st_time . month - 1 ] ) + st_time . day end_doy = sum ( self . _num_of_days_each_month [ : end_time . month - 1 ] ) + end_time . day + 1 return list ( range ( start_doy , end_doy ) ) | Calculate days of the year between start time and end time . |
48,641 | def from_values ( cls , location , direct_normal_irradiance , diffuse_horizontal_irradiance , timestep = 1 , is_leap_year = False ) : err_message = 'For timestep %d, %d number of data for %s is expected. ' '%d is provided.' if len ( direct_normal_irradiance ) % cls . hour_count ( is_leap_year ) == 0 : err_message = err_message + ' Did you forget to set the timestep to %d?' % ( len ( direct_normal_irradiance ) / cls . hour_count ( is_leap_year ) ) assert len ( direct_normal_irradiance ) / timestep == cls . hour_count ( is_leap_year ) , err_message % ( timestep , timestep * cls . hour_count ( is_leap_year ) , 'direct normal irradiance' , len ( direct_normal_irradiance ) ) assert len ( diffuse_horizontal_irradiance ) / timestep == cls . hour_count ( is_leap_year ) , err_message % ( timestep , timestep * cls . hour_count ( is_leap_year ) , 'diffuse_horizontal_irradiance' , len ( direct_normal_irradiance ) ) metadata = { 'source' : location . source , 'country' : location . country , 'city' : location . city } dnr , dhr = cls . _get_data_collections ( direct_normal_irradiance , diffuse_horizontal_irradiance , metadata , timestep , is_leap_year ) return cls ( location , dnr , dhr , timestep , is_leap_year ) | Create wea from a list of irradiance values . |
48,642 | def from_file ( cls , weafile , timestep = 1 , is_leap_year = False ) : assert os . path . isfile ( weafile ) , 'Failed to find {}' . format ( weafile ) location = Location ( ) with open ( weafile , readmode ) as weaf : first_line = weaf . readline ( ) assert first_line . startswith ( 'place' ) , 'Failed to find place in header. ' '{} is not a valid wea file.' . format ( weafile ) location . city = ' ' . join ( first_line . split ( ) [ 1 : ] ) location . latitude = float ( weaf . readline ( ) . split ( ) [ - 1 ] ) location . longitude = - float ( weaf . readline ( ) . split ( ) [ - 1 ] ) location . time_zone = - int ( weaf . readline ( ) . split ( ) [ - 1 ] ) / 15 location . elevation = float ( weaf . readline ( ) . split ( ) [ - 1 ] ) weaf . readline ( ) direct_normal_irradiance = [ ] diffuse_horizontal_irradiance = [ ] for line in weaf : dirn , difh = [ int ( v ) for v in line . split ( ) [ - 2 : ] ] direct_normal_irradiance . append ( dirn ) diffuse_horizontal_irradiance . append ( difh ) return cls . from_values ( location , direct_normal_irradiance , diffuse_horizontal_irradiance , timestep , is_leap_year ) | Create wea object from a wea file . |
48,643 | def from_epw_file ( cls , epwfile , timestep = 1 ) : is_leap_year = False epw = EPW ( epwfile ) direct_normal , diffuse_horizontal = cls . _get_data_collections ( epw . direct_normal_radiation . values , epw . diffuse_horizontal_radiation . values , epw . metadata , 1 , is_leap_year ) if timestep != 1 : print ( "Note: timesteps greater than 1 on epw-generated Wea's \n" + "are suitable for thermal models but are not recommended \n" + "for daylight models." ) direct_normal = direct_normal . interpolate_to_timestep ( timestep ) diffuse_horizontal = diffuse_horizontal . interpolate_to_timestep ( timestep ) sp = Sunpath . from_location ( epw . location ) for i , dt in enumerate ( cls . _get_datetimes ( timestep , is_leap_year ) ) : sun = sp . calculate_sun_from_date_time ( dt ) if sun . altitude < 0 : direct_normal [ i ] = 0 diffuse_horizontal [ i ] = 0 return cls ( epw . location , direct_normal , diffuse_horizontal , timestep , is_leap_year ) | Create a wea object using the solar irradiance values in an epw file . |
48,644 | def from_stat_file ( cls , statfile , timestep = 1 , is_leap_year = False ) : stat = STAT ( statfile ) def check_missing ( opt_data , data_name ) : if opt_data == [ ] : raise ValueError ( 'Stat file contains no optical data.' ) for i , x in enumerate ( opt_data ) : if x is None : raise ValueError ( 'Missing optical depth data for {} at month {}' . format ( data_name , i ) ) check_missing ( stat . monthly_tau_beam , 'monthly_tau_beam' ) check_missing ( stat . monthly_tau_diffuse , 'monthly_tau_diffuse' ) return cls . from_ashrae_revised_clear_sky ( stat . location , stat . monthly_tau_beam , stat . monthly_tau_diffuse , timestep , is_leap_year ) | Create an ASHRAE Revised Clear Sky wea object from the monthly sky optical depths in a . stat file . |
48,645 | def from_zhang_huang_solar ( cls , location , cloud_cover , relative_humidity , dry_bulb_temperature , wind_speed , atmospheric_pressure = None , timestep = 1 , is_leap_year = False , use_disc = False ) : assert len ( cloud_cover ) == len ( relative_humidity ) == len ( dry_bulb_temperature ) == len ( wind_speed ) , 'lengths of input climate data must match.' assert len ( cloud_cover ) / timestep == cls . hour_count ( is_leap_year ) , 'input climate data must be annual.' assert isinstance ( timestep , int ) , 'timestep must be an' ' integer. Got {}' . format ( type ( timestep ) ) if atmospheric_pressure is not None : assert len ( atmospheric_pressure ) == len ( cloud_cover ) , 'length pf atmospheric_pressure must match the other input lists.' else : atmospheric_pressure = [ 101325 ] * cls . hour_count ( is_leap_year ) * timestep sp = Sunpath . from_location ( location ) sp . is_leap_year = is_leap_year date_times = [ ] altitudes = [ ] doys = [ ] dry_bulb_t3_hrs = [ ] for count , t_date in enumerate ( cls . _get_datetimes ( timestep , is_leap_year ) ) : date_times . append ( t_date ) sun = sp . calculate_sun_from_date_time ( t_date ) altitudes . append ( sun . altitude ) doys . append ( sun . datetime . doy ) dry_bulb_t3_hrs . append ( dry_bulb_temperature [ count - ( 3 * timestep ) ] ) dir_ir , diff_ir = zhang_huang_solar_split ( altitudes , doys , cloud_cover , relative_humidity , dry_bulb_temperature , dry_bulb_t3_hrs , wind_speed , atmospheric_pressure , use_disc ) metadata = { 'source' : location . source , 'country' : location . country , 'city' : location . city } direct_norm_rad , diffuse_horiz_rad = cls . _get_data_collections ( dir_ir , diff_ir , metadata , timestep , is_leap_year ) return cls ( location , direct_norm_rad , diffuse_horiz_rad , timestep , is_leap_year ) | Create a wea object from climate data using the Zhang - Huang model . |
48,646 | def datetimes ( self ) : if self . timestep == 1 : return tuple ( dt . add_minute ( 30 ) for dt in self . direct_normal_irradiance . datetimes ) else : return self . direct_normal_irradiance . datetimes | Datetimes in wea file . |
48,647 | def global_horizontal_irradiance ( self ) : analysis_period = AnalysisPeriod ( timestep = self . timestep , is_leap_year = self . is_leap_year ) header_ghr = Header ( data_type = GlobalHorizontalIrradiance ( ) , unit = 'W/m2' , analysis_period = analysis_period , metadata = self . metadata ) glob_horiz = [ ] sp = Sunpath . from_location ( self . location ) sp . is_leap_year = self . is_leap_year for dt , dnr , dhr in zip ( self . datetimes , self . direct_normal_irradiance , self . diffuse_horizontal_irradiance ) : sun = sp . calculate_sun_from_date_time ( dt ) glob_horiz . append ( dhr + dnr * math . sin ( math . radians ( sun . altitude ) ) ) return HourlyContinuousCollection ( header_ghr , glob_horiz ) | Returns the global horizontal irradiance at each timestep . |
48,648 | def direct_horizontal_irradiance ( self ) : analysis_period = AnalysisPeriod ( timestep = self . timestep , is_leap_year = self . is_leap_year ) header_dhr = Header ( data_type = DirectHorizontalIrradiance ( ) , unit = 'W/m2' , analysis_period = analysis_period , metadata = self . metadata ) direct_horiz = [ ] sp = Sunpath . from_location ( self . location ) sp . is_leap_year = self . is_leap_year for dt , dnr in zip ( self . datetimes , self . direct_normal_irradiance ) : sun = sp . calculate_sun_from_date_time ( dt ) direct_horiz . append ( dnr * math . sin ( math . radians ( sun . altitude ) ) ) return HourlyContinuousCollection ( header_dhr , direct_horiz ) | Returns the direct irradiance on a horizontal surface at each timestep . |
48,649 | def _get_datetimes ( timestep , is_leap_year ) : hour_count = 8760 + 24 if is_leap_year else 8760 adjust_time = 30 if timestep == 1 else 0 return tuple ( DateTime . from_moy ( 60.0 * count / timestep + adjust_time , is_leap_year ) for count in xrange ( hour_count * timestep ) ) | List of datetimes based on timestep . |
48,650 | def _get_data_collections ( dnr_values , dhr_values , metadata , timestep , is_leap_year ) : analysis_period = AnalysisPeriod ( timestep = timestep , is_leap_year = is_leap_year ) dnr_header = Header ( data_type = DirectNormalIrradiance ( ) , unit = 'W/m2' , analysis_period = analysis_period , metadata = metadata ) direct_norm_rad = HourlyContinuousCollection ( dnr_header , dnr_values ) dhr_header = Header ( data_type = DiffuseHorizontalIrradiance ( ) , unit = 'W/m2' , analysis_period = analysis_period , metadata = metadata ) diffuse_horiz_rad = HourlyContinuousCollection ( dhr_header , dhr_values ) return direct_norm_rad , diffuse_horiz_rad | Return two data collections for Direct Normal Diffuse Horizontal |
48,651 | def get_irradiance_value ( self , month , day , hour ) : dt = DateTime ( month , day , hour , leap_year = self . is_leap_year ) count = int ( dt . hoy * self . timestep ) return self . direct_normal_irradiance [ count ] , self . diffuse_horizontal_irradiance [ count ] | Get direct and diffuse irradiance values for a point in time . |
48,652 | def get_irradiance_value_for_hoy ( self , hoy ) : count = int ( hoy * self . timestep ) return self . direct_normal_irradiance [ count ] , self . diffuse_horizontal_irradiance [ count ] | Get direct and diffuse irradiance values for an hoy . |
48,653 | def directional_irradiance ( self , altitude = 90 , azimuth = 180 , ground_reflectance = 0.2 , isotrophic = True ) : def pol2cart ( phi , theta ) : mult = math . cos ( theta ) x = math . sin ( phi ) * mult y = math . cos ( phi ) * mult z = math . sin ( theta ) return Vector3 ( x , y , z ) normal = pol2cart ( math . radians ( azimuth ) , math . radians ( altitude ) ) direct_irr , diffuse_irr , reflected_irr , total_irr = [ ] , [ ] , [ ] , [ ] sp = Sunpath . from_location ( self . location ) sp . is_leap_year = self . is_leap_year for dt , dnr , dhr in zip ( self . datetimes , self . direct_normal_irradiance , self . diffuse_horizontal_irradiance ) : sun = sp . calculate_sun_from_date_time ( dt ) sun_vec = pol2cart ( math . radians ( sun . azimuth ) , math . radians ( sun . altitude ) ) vec_angle = sun_vec . angle ( normal ) srf_dir = 0 if sun . altitude > 0 and vec_angle < math . pi / 2 : srf_dir = dnr * math . cos ( vec_angle ) if isotrophic is True : srf_dif = dhr * ( ( math . sin ( math . radians ( altitude ) ) / 2 ) + 0.5 ) else : y = max ( 0.45 , 0.55 + ( 0.437 * math . cos ( vec_angle ) ) + 0.313 * math . cos ( vec_angle ) * 0.313 * math . cos ( vec_angle ) ) srf_dif = self . dhr * ( y * ( math . sin ( math . radians ( abs ( 90 - altitude ) ) ) ) + math . cos ( math . radians ( abs ( 90 - altitude ) ) ) ) e_glob = dhr + dnr * math . cos ( math . radians ( 90 - sun . altitude ) ) srf_ref = e_glob * ground_reflectance * ( 0.5 - ( math . sin ( math . radians ( altitude ) ) / 2 ) ) direct_irr . append ( srf_dir ) diffuse_irr . append ( srf_dif ) reflected_irr . append ( srf_ref ) total_irr . append ( srf_dir + srf_dif + srf_ref ) a_per = AnalysisPeriod ( timestep = self . timestep , is_leap_year = self . is_leap_year ) direct_hea = diffuse_hea = reflected_hea = total_hea = Header ( Irradiance ( ) , 'W/m2' , a_per , self . 
metadata ) direct_irradiance = HourlyContinuousCollection ( direct_hea , direct_irr ) diffuse_irradiance = HourlyContinuousCollection ( diffuse_hea , diffuse_irr ) reflected_irradiance = HourlyContinuousCollection ( reflected_hea , reflected_irr ) total_irradiance = HourlyContinuousCollection ( total_hea , total_irr ) return total_irradiance , direct_irradiance , diffuse_irradiance , reflected_irradiance | Returns the irradiance components facing a given altitude and azimuth . |
48,654 | def header ( self ) : return "place %s\n" % self . location . city + "latitude %.2f\n" % self . location . latitude + "longitude %.2f\n" % - self . location . longitude + "time_zone %d\n" % ( - self . location . time_zone * 15 ) + "site_elevation %.1f\n" % self . location . elevation + "weather_data_file_units 1\n" | Wea header . |
48,655 | def write ( self , file_path , hoys = None , write_hours = False ) : if not file_path . lower ( ) . endswith ( '.wea' ) : file_path += '.wea' full_wea = False if not hoys : hoys = self . hoys full_wea = True lines = [ self . header ] if full_wea : for dir_rad , dif_rad , dt in zip ( self . direct_normal_irradiance , self . diffuse_horizontal_irradiance , self . datetimes ) : line = "%d %d %.3f %d %d\n" % ( dt . month , dt . day , dt . float_hour , dir_rad , dif_rad ) lines . append ( line ) else : for hoy in hoys : try : dir_rad , dif_rad = self . get_irradiance_value_for_hoy ( hoy ) except IndexError : print ( 'Warn: Wea data for {} is not available!' . format ( dt ) ) continue dt = DateTime . from_hoy ( hoy ) dt = dt . add_minute ( 30 ) if self . timestep == 1 else dt line = "%d %d %.3f %d %d\n" % ( dt . month , dt . day , dt . float_hour , dir_rad , dif_rad ) lines . append ( line ) file_data = '' . join ( lines ) write_to_file ( file_path , file_data , True ) if write_hours : hrs_file_path = file_path [ : - 4 ] + '.hrs' hrs_data = ',' . join ( str ( h ) for h in hoys ) + '\n' write_to_file ( hrs_file_path , hrs_data , True ) return file_path | Write the wea file . |
48,656 | def flatten ( input_list ) : for el in input_list : if isinstance ( el , collections . Iterable ) and not isinstance ( el , basestring ) : for sub in flatten ( el ) : yield sub else : yield el | Return a flattened genertor from an input list . |
48,657 | def unflatten ( guide , falttened_input ) : return [ unflatten ( sub_list , falttened_input ) if isinstance ( sub_list , list ) else next ( falttened_input ) for sub_list in guide ] | Unflatten a falttened generator . |
48,658 | def color ( self , value ) : assert self . _is_domain_set , "Domain is not set. Use self.domain to set the domain." if self . _ctype == 2 : try : return self . _colors [ self . _domain . index ( value ) ] except ValueError : raise ValueError ( "%s is not a valid input for ordinal type.\n" % str ( value ) + "List of valid values are %s" % ";" . join ( map ( str , self . _domain ) ) ) if value < self . _domain [ 0 ] : return self . _colors [ 0 ] if value > self . _domain [ - 1 ] : return self . _colors [ - 1 ] for count , d in enumerate ( self . _domain ) : if d <= value <= self . _domain [ count + 1 ] : if self . _ctype == 0 : return self . _cal_color ( value , count ) if self . _ctype == 1 : return self . _colors [ count + 1 ] | Return color for an input value . |
48,659 | def _cal_color ( self , value , color_index ) : range_min_p = self . _domain [ color_index ] range_p = self . _domain [ color_index + 1 ] - range_min_p try : factor = ( value - range_min_p ) / range_p except ZeroDivisionError : factor = 0 min_color = self . colors [ color_index ] max_color = self . colors [ color_index + 1 ] red = round ( factor * ( max_color . r - min_color . r ) + min_color . r ) green = round ( factor * ( max_color . g - min_color . g ) + min_color . g ) blue = round ( factor * ( max_color . b - min_color . b ) + min_color . b ) return Color ( red , green , blue ) | Blend between two colors based on input value . |
48,660 | def from_location ( cls , location , north_angle = 0 , daylight_saving_period = None ) : location = Location . from_location ( location ) return cls ( location . latitude , location . longitude , location . time_zone , north_angle , daylight_saving_period ) | Create a sun path from a LBlocation . |
48,661 | def latitude ( self , value ) : self . _latitude = math . radians ( float ( value ) ) assert - self . PI / 2 <= self . _latitude <= self . PI / 2 , "latitude value should be between -90..90." | Set latitude value . |
48,662 | def longitude ( self , value ) : self . _longitude = math . radians ( float ( value ) ) if abs ( ( value / 15.0 ) - self . time_zone ) > 1 : self . time_zone = value / 15.0 | Set longitude value in degrees . |
48,663 | def is_daylight_saving_hour ( self , datetime ) : if not self . daylight_saving_period : return False return self . daylight_saving_period . isTimeIncluded ( datetime . hoy ) | Check if a datetime is a daylight saving time . |
48,664 | def calculate_sun_from_date_time ( self , datetime , is_solar_time = False ) : if datetime . year != 2016 and self . is_leap_year : datetime = DateTime ( datetime . month , datetime . day , datetime . hour , datetime . minute , True ) sol_dec , eq_of_time = self . _calculate_solar_geometry ( datetime ) hour = datetime . float_hour is_daylight_saving = self . is_daylight_saving_hour ( datetime . hoy ) hour = hour + 1 if self . is_daylight_saving_hour ( datetime . hoy ) else hour sol_time = self . _calculate_solar_time ( hour , eq_of_time , is_solar_time ) * 60 if sol_time / 4 < 0 : hour_angle = sol_time / 4 + 180 else : hour_angle = sol_time / 4 - 180 zenith = math . degrees ( math . acos ( math . sin ( self . _latitude ) * math . sin ( math . radians ( sol_dec ) ) + math . cos ( self . _latitude ) * math . cos ( math . radians ( sol_dec ) ) * math . cos ( math . radians ( hour_angle ) ) ) ) altitude = 90 - zenith if altitude > 85 : atmos_refraction = 0 else : if altitude > 5 : atmos_refraction = 58.1 / math . tan ( math . radians ( altitude ) ) - 0.07 / ( math . tan ( math . radians ( altitude ) ) ) ** 3 + 0.000086 / ( math . tan ( math . radians ( altitude ) ) ) ** 5 else : if altitude > - 0.575 : atmos_refraction = 1735 + altitude * ( - 518.2 + altitude * ( 103.4 + altitude * ( - 12.79 + altitude * 0.711 ) ) ) else : atmos_refraction = - 20.772 / math . tan ( math . radians ( altitude ) ) atmos_refraction /= 3600 altitude += atmos_refraction if hour_angle > 0 : azimuth = ( math . degrees ( math . acos ( ( ( math . sin ( self . _latitude ) * math . cos ( math . radians ( zenith ) ) ) - math . sin ( math . radians ( sol_dec ) ) ) / ( math . cos ( self . _latitude ) * math . sin ( math . radians ( zenith ) ) ) ) ) + 180 ) % 360 else : azimuth = ( 540 - math . degrees ( math . acos ( ( ( math . sin ( self . _latitude ) * math . cos ( math . radians ( zenith ) ) ) - math . sin ( math . radians ( sol_dec ) ) ) / ( math . cos ( self . _latitude ) * math . 
sin ( math . radians ( zenith ) ) ) ) ) ) % 360 altitude = math . radians ( altitude ) azimuth = math . radians ( azimuth ) return Sun ( datetime , altitude , azimuth , is_solar_time , is_daylight_saving , self . north_angle ) | Get Sun for an hour of the year . |
48,665 | def calculate_sunrise_sunset ( self , month , day , depression = 0.833 , is_solar_time = False ) : datetime = DateTime ( month , day , hour = 12 , leap_year = self . is_leap_year ) return self . calculate_sunrise_sunset_from_datetime ( datetime , depression , is_solar_time ) | Calculate sunrise noon and sunset . |
48,666 | def calculate_sunrise_sunset_from_datetime ( self , datetime , depression = 0.833 , is_solar_time = False ) : if datetime . year != 2016 and self . is_leap_year : datetime = DateTime ( datetime . month , datetime . day , datetime . hour , datetime . minute , True ) sol_dec , eq_of_time = self . _calculate_solar_geometry ( datetime ) if is_solar_time : noon = .5 else : noon = ( 720 - 4 * math . degrees ( self . _longitude ) - eq_of_time + self . time_zone * 60 ) / 1440.0 try : sunrise_hour_angle = self . _calculate_sunrise_hour_angle ( sol_dec , depression ) except ValueError : noon = 24 * noon return { "sunrise" : None , "noon" : DateTime ( datetime . month , datetime . day , * self . _calculate_hour_and_minute ( noon ) , leap_year = self . is_leap_year ) , "sunset" : None } else : sunrise = noon - sunrise_hour_angle * 4 / 1440.0 sunset = noon + sunrise_hour_angle * 4 / 1440.0 noon = 24 * noon sunrise = 24 * sunrise sunset = 24 * sunset return { "sunrise" : DateTime ( datetime . month , datetime . day , * self . _calculate_hour_and_minute ( sunrise ) , leap_year = self . is_leap_year ) , "noon" : DateTime ( datetime . month , datetime . day , * self . _calculate_hour_and_minute ( noon ) , leap_year = self . is_leap_year ) , "sunset" : DateTime ( datetime . month , datetime . day , * self . _calculate_hour_and_minute ( sunset ) , leap_year = self . is_leap_year ) } | Calculate sunrise sunset and noon for a day of year . |
48,667 | def _calculate_sunrise_hour_angle ( self , solar_dec , depression = 0.833 ) : hour_angle_arg = math . degrees ( math . acos ( math . cos ( math . radians ( 90 + depression ) ) / ( math . cos ( math . radians ( self . latitude ) ) * math . cos ( math . radians ( solar_dec ) ) ) - math . tan ( math . radians ( self . latitude ) ) * math . tan ( math . radians ( solar_dec ) ) ) ) return hour_angle_arg | Calculate hour angle for sunrise time in degrees . |
48,668 | def _calculate_solar_time ( self , hour , eq_of_time , is_solar_time ) : if is_solar_time : return hour return ( ( hour * 60 + eq_of_time + 4 * math . degrees ( self . _longitude ) - 60 * self . time_zone ) % 1440 ) / 60 | Calculate Solar time for an hour . |
48,669 | def _calculate_solar_time_by_doy ( self , hour , doy ) : raise NotImplementedError ( ) return ( 0.170 * math . sin ( ( 4 * math . pi / 373 ) * ( doy - 80 ) ) - 0.129 * math . sin ( ( 2 * math . pi / 355 ) * ( doy - 8 ) ) + 12 * ( - ( 15 * self . time_zone ) - self . longitude ) / math . pi ) | This is how radiance calculates solar time . |
48,670 | def draw_sunpath ( self , hoys = None , origin = None , scale = 1 , sun_scale = 1 , annual = True , rem_night = True ) : assert ladybug . isplus , '"draw_sunpath" method can only be used in the [+] libraries.' hoys = hoys or ( ) origin = origin or ( 0 , 0 , 0 ) try : origin = tuple ( origin ) except TypeError as e : try : origin = origin . X , origin . Y , origin . Z except AttributeError : raise TypeError ( str ( e ) ) scale = scale or 1 sun_scale = sun_scale or 1 assert annual or hoys , 'For daily sunpath you need to provide at least one hour.' radius = 200 * scale base_curves = plus . base_curves ( origin , radius , self . north_angle ) if annual : asuns = self . _analemma_suns ( ) analemma_curves = plus . analemma_curves ( asuns , origin , radius ) else : analemma_curves = ( ) if hoys : suns = tuple ( self . calculate_sun_from_hoy ( hour ) for hour in hoys ) else : suns = ( ) if rem_night : suns = tuple ( sun for sun in suns if sun . is_during_day ) sun_geos = plus . sun_geometry ( suns , origin , radius ) if annual : dts = ( DateTime ( m , 21 ) for m in xrange ( 1 , 13 ) ) else : dts = ( sun . datetime for sun in suns ) dsuns = self . _daily_suns ( dts ) daily_curves = plus . daily_curves ( dsuns , origin , radius ) SPGeo = namedtuple ( 'SunpathGeo' , ( 'compass_curves' , 'analemma_curves' , 'daily_curves' , 'suns' , 'sun_geos' ) ) return SPGeo ( base_curves , analemma_curves , daily_curves , suns , sun_geos ) | Create sunpath geometry . \ This method should only be used from the + libraries . |
48,671 | def _analemma_position ( self , hour ) : low = self . calculate_sun ( 12 , 21 , hour ) . is_during_day high = self . calculate_sun ( 6 , 21 , hour ) . is_during_day if low and high : return 1 elif low or high : return 0 else : return - 1 | Check what the analemma position is for an hour . |
48,672 | def _analemma_suns ( self ) : for h in xrange ( 0 , 24 ) : if self . _analemma_position ( h ) < 0 : continue elif self . _analemma_position ( h ) == 0 : chours = [ ] prevhour = self . latitude <= 0 num_of_days = 8760 if not self . is_leap_year else 8760 + 24 for hoy in xrange ( h , num_of_days , 24 ) : thishour = self . calculate_sun_from_hoy ( hoy ) . is_during_day if thishour != prevhour : if not thishour : hoy -= 24 dt = DateTime . from_hoy ( hoy , self . is_leap_year ) chours . append ( ( dt . month , dt . day , dt . hour ) ) prevhour = thishour tt = [ ] for hcount in range ( int ( len ( chours ) / 2 ) ) : st = chours [ 2 * hcount ] en = chours [ 2 * hcount + 1 ] if self . latitude >= 0 : tt = [ self . calculate_sun ( * st ) ] + [ self . calculate_sun ( st [ 0 ] , d , h ) for d in xrange ( st [ 1 ] + 1 , 29 , 7 ) ] + [ self . calculate_sun ( m , d , h ) for m in xrange ( st [ 0 ] + 1 , en [ 0 ] ) for d in xrange ( 3 , 29 , 7 ) ] + [ self . calculate_sun ( en [ 0 ] , d , h ) for d in xrange ( 3 , en [ 1 ] , 7 ) ] + [ self . calculate_sun ( * en ) ] else : tt = [ self . calculate_sun ( * en ) ] + [ self . calculate_sun ( en [ 0 ] , d , h ) for d in xrange ( en [ 1 ] + 1 , 29 , 7 ) ] + [ self . calculate_sun ( m , d , h ) for m in xrange ( en [ 0 ] + 1 , 13 ) for d in xrange ( 3 , 29 , 7 ) ] + [ self . calculate_sun ( m , d , h ) for m in xrange ( 1 , st [ 0 ] ) for d in xrange ( 3 , 29 , 7 ) ] + [ self . calculate_sun ( st [ 0 ] , d , h ) for d in xrange ( 3 , st [ 1 ] , 7 ) ] + [ self . calculate_sun ( * st ) ] yield tt else : yield tuple ( self . calculate_sun ( ( m % 12 ) + 1 , d , h ) for m in xrange ( 0 , 13 ) for d in ( 7 , 14 , 21 ) ) [ : - 2 ] | Calculate times that should be used for drawing analemma_curves . |
48,673 | def _daily_suns ( self , datetimes ) : for dt in datetimes : nss = self . calculate_sunrise_sunset ( dt . month , dt . day ) dts = tuple ( nss [ k ] for k in ( 'sunrise' , 'noon' , 'sunset' ) ) if dts [ 0 ] is None : yield ( self . calculate_sun ( dt . month , dt . day , h ) for h in ( 0 , 12 , 15 ) ) , False else : yield ( self . calculate_sun_from_date_time ( dt ) for dt in dts ) , True | Get sun curve for multiple days of the year . |
48,674 | def _calculate_sun_vector ( self ) : z_axis = Vector3 ( 0. , 0. , - 1. ) x_axis = Vector3 ( 1. , 0. , 0. ) north_vector = Vector3 ( 0. , 1. , 0. ) _sun_vector = north_vector . rotate_around ( x_axis , self . altitude_in_radians ) . rotate_around ( z_axis , self . azimuth_in_radians ) . rotate_around ( z_axis , math . radians ( - 1 * self . north_angle ) ) _sun_vector . normalize ( ) try : _sun_vector . flip ( ) except AttributeError : _sun_vector = Vector3 ( - 1 * _sun_vector . x , - 1 * _sun_vector . y , - 1 * _sun_vector . z ) self . _sun_vector = _sun_vector | Calculate sun vector for this sun . |
48,675 | def from_json ( cls , data ) : required_keys = ( 'location' , 'design_days' ) for key in required_keys : assert key in data , 'Required key "{}" is missing!' . format ( key ) return cls ( Location . from_json ( data [ 'location' ] ) , [ DesignDay . from_json ( des_day ) for des_day in data [ 'design_days' ] ] ) | Create a DDY from a dictionary . |
48,676 | def from_ddy_file ( cls , file_path ) : if not os . path . isfile ( file_path ) : raise ValueError ( 'Cannot find a .ddy file at {}' . format ( file_path ) ) if not file_path . lower ( ) . endswith ( '.ddy' ) : raise ValueError ( 'DDY file does not have a .ddy extension.' ) try : iron_python = True if platform . python_implementation ( ) == 'IronPython' else False except Exception : iron_python = True if iron_python : ddywin = codecs . open ( file_path , 'r' ) else : ddywin = codecs . open ( file_path , 'r' , encoding = 'utf-8' , errors = 'ignore' ) try : ddytxt = ddywin . read ( ) location_format = re . compile ( r"(Site:Location,(.|\n)*?((;\s*!)|(;\s*\n)|(;\n)))" ) design_day_format = re . compile ( r"(SizingPeriod:DesignDay,(.|\n)*?((;\s*!)|(;\s*\n)|(;\n)))" ) location_matches = location_format . findall ( ddytxt ) des_day_matches = design_day_format . findall ( ddytxt ) except Exception as e : import traceback raise Exception ( '{}\n{}' . format ( e , traceback . format_exc ( ) ) ) else : assert len ( location_matches ) > 0 , 'No location objects found ' 'in .ddy file.' location = Location . from_location ( location_matches [ 0 ] [ 0 ] ) design_days = [ DesignDay . from_ep_string ( match [ 0 ] , location ) for match in des_day_matches ] finally : ddywin . close ( ) cls_ = cls ( location , design_days ) cls_ . _file_path = os . path . normpath ( file_path ) return cls_ | Initalize from a ddy file object from an existing ddy file . |
48,677 | def save ( self , file_path ) : data = self . location . ep_style_location_string + '\n\n' for d_day in self . design_days : data = data + d_day . ep_style_string + '\n\n' write_to_file ( file_path , data , True ) | Save ddy object as a . ddy file . |
48,678 | def filter_by_keyword ( self , keyword ) : filtered_days = [ ] for des_day in self . design_days : if keyword in des_day . name : filtered_days . append ( des_day ) return filtered_days | Return a list of ddys that have a certain keyword in their name . |
48,679 | def from_json ( cls , data ) : required_keys = ( 'name' , 'day_type' , 'location' , 'dry_bulb_condition' , 'humidity_condition' , 'wind_condition' , 'sky_condition' ) for key in required_keys : assert key in data , 'Required key "{}" is missing!' . format ( key ) return cls ( data [ 'name' ] , data [ 'day_type' ] , Location . from_json ( data [ 'location' ] ) , DryBulbCondition . from_json ( data [ 'dry_bulb_condition' ] ) , HumidityCondition . from_json ( data [ 'humidity_condition' ] ) , WindCondition . from_json ( data [ 'wind_condition' ] ) , SkyCondition . from_json ( data [ 'sky_condition' ] ) ) | Create a Design Day from a dictionary . |
48,680 | def from_design_day_properties ( cls , name , day_type , location , analysis_period , dry_bulb_max , dry_bulb_range , humidity_type , humidity_value , barometric_p , wind_speed , wind_dir , sky_model , sky_properties ) : dry_bulb_condition = DryBulbCondition ( dry_bulb_max , dry_bulb_range ) humidity_condition = HumidityCondition ( humidity_type , humidity_value , barometric_p ) wind_condition = WindCondition ( wind_speed , wind_dir ) if sky_model == 'ASHRAEClearSky' : sky_condition = OriginalClearSkyCondition . from_analysis_period ( analysis_period , sky_properties [ 0 ] ) elif sky_model == 'ASHRAETau' : sky_condition = RevisedClearSkyCondition . from_analysis_period ( analysis_period , sky_properties [ 0 ] , sky_properties [ - 1 ] ) return cls ( name , day_type , location , dry_bulb_condition , humidity_condition , wind_condition , sky_condition ) | Create a design day object from various key properties . |
48,681 | def analysis_period ( self ) : return AnalysisPeriod ( self . sky_condition . month , self . sky_condition . day_of_month , 0 , self . sky_condition . month , self . sky_condition . day_of_month , 23 ) | The analysisperiod of the design day . |
48,682 | def hourly_dew_point ( self ) : dpt_data = self . _humidity_condition . hourly_dew_point_values ( self . _dry_bulb_condition ) return self . _get_daily_data_collections ( temperature . DewPointTemperature ( ) , 'C' , dpt_data ) | A data collection containing hourly dew points over they day . |
48,683 | def hourly_relative_humidity ( self ) : dpt_data = self . _humidity_condition . hourly_dew_point_values ( self . _dry_bulb_condition ) rh_data = [ rel_humid_from_db_dpt ( x , y ) for x , y in zip ( self . _dry_bulb_condition . hourly_values , dpt_data ) ] return self . _get_daily_data_collections ( fraction . RelativeHumidity ( ) , '%' , rh_data ) | A data collection containing hourly relative humidity over they day . |
48,684 | def hourly_solar_radiation ( self ) : dir_norm , diff_horiz , glob_horiz = self . _sky_condition . radiation_values ( self . _location ) dir_norm_data = self . _get_daily_data_collections ( energyintensity . DirectNormalRadiation ( ) , 'Wh/m2' , dir_norm ) diff_horiz_data = self . _get_daily_data_collections ( energyintensity . DiffuseHorizontalRadiation ( ) , 'Wh/m2' , diff_horiz ) glob_horiz_data = self . _get_daily_data_collections ( energyintensity . GlobalHorizontalRadiation ( ) , 'Wh/m2' , glob_horiz ) return dir_norm_data , diff_horiz_data , glob_horiz_data | Three data collections containing hourly direct normal diffuse horizontal and global horizontal radiation . |
48,685 | def _get_daily_data_collections ( self , data_type , unit , values ) : data_header = Header ( data_type = data_type , unit = unit , analysis_period = self . analysis_period , metadata = { 'source' : self . _location . source , 'country' : self . _location . country , 'city' : self . _location . city } ) return HourlyContinuousCollection ( data_header , values ) | Return an empty data collection . |
48,686 | def hourly_values ( self ) : return [ self . _dry_bulb_max - self . _dry_bulb_range * x for x in self . temp_multipliers ] | A list of temperature values for each hour over the design day . |
48,687 | def to_json ( self ) : return { 'dry_bulb_max' : self . dry_bulb_max , 'dry_bulb_range' : self . dry_bulb_range , 'modifier_type' : self . modifier_type , 'modifier_schedule' : self . modifier_schedule } | Convert the Dry Bulb Condition to a dictionary . |
48,688 | def from_json ( cls , data ) : required_keys = ( 'hum_type' , 'hum_value' ) optional_keys = { 'barometric_pressure' : 101325 , 'schedule' : '' , 'wet_bulb_range' : '' } for key in required_keys : assert key in data , 'Required key "{}" is missing!' . format ( key ) for key , val in optional_keys . items ( ) : if key not in data : data [ key ] = val return cls ( data [ 'hum_type' ] , data [ 'hum_value' ] , data [ 'barometric_pressure' ] , data [ 'schedule' ] , data [ 'wet_bulb_range' ] ) | Create a Humidity Condition from a dictionary . |
48,689 | def to_json ( self ) : return { 'hum_type' : self . hum_type , 'hum_value' : self . hum_value , 'barometric_pressure' : self . barometric_pressure , 'schedule' : self . schedule , 'wet_bulb_range' : self . wet_bulb_range , } | Convert the Humidity Condition to a dictionary . |
48,690 | def from_json ( cls , data ) : optional_keys = { 'wind_direction' : 0 , 'rain' : False , 'snow_on_ground' : False } assert 'wind_speed' in data , 'Required key "wind_speed" is missing!' for key , val in optional_keys . items ( ) : if key not in data : data [ key ] = val return cls ( data [ 'wind_speed' ] , data [ 'wind_direction' ] , data [ 'rain' ] , data [ 'snow_on_ground' ] ) | Create a Wind Condition from a dictionary . |
48,691 | def to_json ( self ) : return { 'wind_speed' : self . wind_speed , 'wind_direction' : self . wind_direction , 'rain' : self . rain , 'snow_on_ground' : self . snow_on_ground } | Convert the Wind Condition to a dictionary . |
48,692 | def _get_datetimes ( self , timestep = 1 ) : start_moy = DateTime ( self . _month , self . _day_of_month ) . moy if timestep == 1 : start_moy = start_moy + 30 num_moys = 24 * timestep return tuple ( DateTime . from_moy ( start_moy + ( i * ( 1 / timestep ) * 60 ) ) for i in xrange ( num_moys ) ) | List of datetimes based on design day date and timestep . |
48,693 | def from_analysis_period ( cls , analysis_period , clearness = 1 , daylight_savings_indicator = 'No' ) : _check_analysis_period ( analysis_period ) return cls ( analysis_period . st_month , analysis_period . st_day , clearness , daylight_savings_indicator ) | Initialize a OriginalClearSkyCondition from an analysis_period |
48,694 | def radiation_values ( self , location , timestep = 1 ) : sp = Sunpath . from_location ( location ) altitudes = [ ] dates = self . _get_datetimes ( timestep ) for t_date in dates : sun = sp . calculate_sun_from_date_time ( t_date ) altitudes . append ( sun . altitude ) dir_norm , diff_horiz = ashrae_clear_sky ( altitudes , self . _month , self . _clearness ) glob_horiz = [ dhr + dnr * math . sin ( math . radians ( alt ) ) for alt , dnr , dhr in zip ( altitudes , dir_norm , diff_horiz ) ] return dir_norm , diff_horiz , glob_horiz | Lists of driect normal diffuse horiz and global horiz rad at each timestep . |
48,695 | def from_analysis_period ( cls , analysis_period , tau_b , tau_d , daylight_savings_indicator = 'No' ) : _check_analysis_period ( analysis_period ) return cls ( analysis_period . st_month , analysis_period . st_day , tau_b , tau_d , daylight_savings_indicator ) | Initialize a RevisedClearSkyCondition from an analysis_period |
48,696 | def convert_to_unit ( self , unit ) : self . _values = self . _header . data_type . to_unit ( self . _values , unit , self . _header . unit ) self . _header . _unit = unit | Convert the Data Collection to the input unit . |
48,697 | def convert_to_ip ( self ) : self . _values , self . _header . _unit = self . _header . data_type . to_ip ( self . _values , self . _header . unit ) | Convert the Data Collection to IP units . |
48,698 | def convert_to_si ( self ) : self . _values , self . _header . _unit = self . _header . data_type . to_si ( self . _values , self . _header . unit ) | Convert the Data Collection to SI units . |
48,699 | def to_unit ( self , unit ) : new_data_c = self . duplicate ( ) new_data_c . convert_to_unit ( unit ) return new_data_c | Return a Data Collection in the input unit . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.