Dataset schema: docstring (string, lengths 52-499), function (string, lengths 67-35.2k), __index_level_0__ (int64, values 52.6k-1.16M)
Corresponds to IDD Field `relative_humidity` Args: value (int): value for IDD Field `relative_humidity` value >= 0 value <= 110 Missing value: 999 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def relative_humidity(self, value=999): if value is not None: try: value = int(value) except ValueError: raise ValueError('value {} needs to be of type int ' 'for field `relative_humidity`'.format(value)) if value < 0: raise ValueError('value needs to be greater than or equal to 0 ' 'for field `relative_humidity`') if value > 110: raise ValueError('value needs to be smaller than or equal to 110 ' 'for field `relative_humidity`') self._relative_humidity = value
797,527
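Note a quirk of these generated setters: the default argument doubles as the field's "missing" sentinel (999 here), yet every non-None value is range-checked, so passing the sentinel (or relying on the default) raises ValueError. A minimal sketch of a validator that whitelists the sentinel first; the function name and MISSING constant are illustrative, not pyepw's API:

```python
MISSING = 999  # EPW "missing value" sentinel for relative humidity

def validate_relative_humidity(value=MISSING):
    """Sketch: range-check a relative humidity value, letting the
    missing-value sentinel pass through unvalidated."""
    if value is None or value == MISSING:
        return MISSING  # field was not reported
    value = int(value)
    if not 0 <= value <= 110:
        raise ValueError('value needs to be between 0 and 110 '
                         'for field `relative_humidity`')
    return value

assert validate_relative_humidity() == 999   # sentinel passes through
assert validate_relative_humidity(55) == 55  # in-range value accepted
```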
Corresponds to IDD Field `atmospheric_station_pressure` Args: value (int): value for IDD Field `atmospheric_station_pressure` Unit: Pa value > 31000 value < 120000 Missing value: 999999 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def atmospheric_station_pressure(self, value=999999): if value is not None: try: value = int(value) except ValueError: raise ValueError( 'value {} needs to be of type int ' 'for field `atmospheric_station_pressure`'.format(value)) if value <= 31000: raise ValueError('value needs to be greater than 31000 ' 'for field `atmospheric_station_pressure`') if value >= 120000: raise ValueError('value needs to be smaller than 120000 ' 'for field `atmospheric_station_pressure`') self._atmospheric_station_pressure = value
797,528
Corresponds to IDD Field `extraterrestrial_horizontal_radiation` Args: value (float): value for IDD Field `extraterrestrial_horizontal_radiation` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def extraterrestrial_horizontal_radiation(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `extraterrestrial_horizontal_radiation`'.format(value)) if value < 0.0: raise ValueError( 'value needs to be greater than or equal to 0.0 ' 'for field `extraterrestrial_horizontal_radiation`') self._extraterrestrial_horizontal_radiation = value
797,529
Corresponds to IDD Field `extraterrestrial_direct_normal_radiation` Args: value (float): value for IDD Field `extraterrestrial_direct_normal_radiation` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def extraterrestrial_direct_normal_radiation(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `extraterrestrial_direct_normal_radiation`'.format(value)) if value < 0.0: raise ValueError( 'value needs to be greater than or equal to 0.0 ' 'for field `extraterrestrial_direct_normal_radiation`') self._extraterrestrial_direct_normal_radiation = value
797,530
Corresponds to IDD Field `horizontal_infrared_radiation_intensity` Args: value (float): value for IDD Field `horizontal_infrared_radiation_intensity` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def horizontal_infrared_radiation_intensity(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `horizontal_infrared_radiation_intensity`'.format(value)) if value < 0.0: raise ValueError( 'value needs to be greater than or equal to 0.0 ' 'for field `horizontal_infrared_radiation_intensity`') self._horizontal_infrared_radiation_intensity = value
797,531
Corresponds to IDD Field `global_horizontal_radiation` Args: value (float): value for IDD Field `global_horizontal_radiation` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def global_horizontal_radiation(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `global_horizontal_radiation`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `global_horizontal_radiation`') self._global_horizontal_radiation = value
797,532
Corresponds to IDD Field `direct_normal_radiation` Args: value (float): value for IDD Field `direct_normal_radiation` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def direct_normal_radiation(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `direct_normal_radiation`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `direct_normal_radiation`') self._direct_normal_radiation = value
797,533
Corresponds to IDD Field `diffuse_horizontal_radiation` Args: value (float): value for IDD Field `diffuse_horizontal_radiation` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def diffuse_horizontal_radiation(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `diffuse_horizontal_radiation`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `diffuse_horizontal_radiation`') self._diffuse_horizontal_radiation = value
797,534
Corresponds to IDD Field `global_horizontal_illuminance` will be missing if >= 999900 Args: value (float): value for IDD Field `global_horizontal_illuminance` Unit: lux value >= 0.0 Missing value: 999999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def global_horizontal_illuminance(self, value=999999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `global_horizontal_illuminance`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `global_horizontal_illuminance`') self._global_horizontal_illuminance = value
797,535
Corresponds to IDD Field `direct_normal_illuminance` will be missing if >= 999900 Args: value (float): value for IDD Field `direct_normal_illuminance` Unit: lux value >= 0.0 Missing value: 999999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def direct_normal_illuminance(self, value=999999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `direct_normal_illuminance`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `direct_normal_illuminance`') self._direct_normal_illuminance = value
797,536
Corresponds to IDD Field `diffuse_horizontal_illuminance` will be missing if >= 999900 Args: value (float): value for IDD Field `diffuse_horizontal_illuminance` Unit: lux value >= 0.0 Missing value: 999999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def diffuse_horizontal_illuminance(self, value=999999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `diffuse_horizontal_illuminance`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `diffuse_horizontal_illuminance`') self._diffuse_horizontal_illuminance = value
797,537
Corresponds to IDD Field `zenith_luminance` will be missing if >= 9999 Args: value (float): value for IDD Field `zenith_luminance` Unit: Cd/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def zenith_luminance(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} needs to be of type float ' 'for field `zenith_luminance`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `zenith_luminance`') self._zenith_luminance = value
797,538
Corresponds to IDD Field `wind_direction` Args: value (float): value for IDD Field `wind_direction` Unit: degrees value >= 0.0 value <= 360.0 Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def wind_direction(self, value=999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} needs to be of type float ' 'for field `wind_direction`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `wind_direction`') if value > 360.0: raise ValueError('value needs to be smaller than or equal to 360.0 ' 'for field `wind_direction`') self._wind_direction = value
797,539
Corresponds to IDD Field `wind_speed` Args: value (float): value for IDD Field `wind_speed` Unit: m/s value >= 0.0 value <= 40.0 Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def wind_speed(self, value=999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} needs to be of type float ' 'for field `wind_speed`'.format(value)) if value < 0.0: raise ValueError('value needs to be greater than or equal to 0.0 ' 'for field `wind_speed`') if value > 40.0: raise ValueError('value needs to be smaller than or equal to 40.0 ' 'for field `wind_speed`') self._wind_speed = value
797,540
Corresponds to IDD Field `visibility` This is the value for visibility in km. (Horizontal visibility at the time indicated.) Args: value (float): value for IDD Field `visibility` Unit: km Missing value: 9999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def visibility(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} needs to be of type float ' 'for field `visibility`'.format(value)) self._visibility = value
797,543
Corresponds to IDD Field `present_weather_codes` Args: value (int): value for IDD Field `present_weather_codes` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def present_weather_codes(self, value=None): if value is not None: try: value = int(value) except ValueError: raise ValueError( 'value {} needs to be of type int ' 'for field `present_weather_codes`'.format(value)) self._present_weather_codes = value
797,546
Corresponds to IDD Field `precipitable_water` Args: value (float): value for IDD Field `precipitable_water` Unit: mm Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def precipitable_water(self, value=999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `precipitable_water`'.format(value)) self._precipitable_water = value
797,547
Corresponds to IDD Field `aerosol_optical_depth` Args: value (float): value for IDD Field `aerosol_optical_depth` Unit: thousandths Missing value: 0.999 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def aerosol_optical_depth(self, value=0.999): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `aerosol_optical_depth`'.format(value)) self._aerosol_optical_depth = value
797,548
Corresponds to IDD Field `snow_depth` Args: value (float): value for IDD Field `snow_depth` Unit: cm Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def snow_depth(self, value=999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} needs to be of type float ' 'for field `snow_depth`'.format(value)) self._snow_depth = value
797,549
Corresponds to IDD Field `days_since_last_snowfall` Args: value (int): value for IDD Field `days_since_last_snowfall` Missing value: 99 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def days_since_last_snowfall(self, value=99): if value is not None: try: value = int(value) except ValueError: raise ValueError( 'value {} needs to be of type int ' 'for field `days_since_last_snowfall`'.format(value)) self._days_since_last_snowfall = value
797,550
Corresponds to IDD Field `albedo` Args: value (float): value for IDD Field `albedo` Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def albedo(self, value=999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} needs to be of type float ' 'for field `albedo`'.format(value)) self._albedo = value
797,551
Corresponds to IDD Field `liquid_precipitation_depth` Args: value (float): value for IDD Field `liquid_precipitation_depth` Unit: mm Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def liquid_precipitation_depth(self, value=999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `liquid_precipitation_depth`'.format(value)) self._liquid_precipitation_depth = value
797,552
Corresponds to IDD Field `liquid_precipitation_quantity` Args: value (float): value for IDD Field `liquid_precipitation_quantity` Unit: hr Missing value: 99.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
def liquid_precipitation_quantity(self, value=99.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} needs to be of type float ' 'for field `liquid_precipitation_quantity`'.format(value)) self._liquid_precipitation_quantity = value
797,553
Exports object to its string representation. Args: top (bool): if True, appends `internal_name` before values. All non-list objects should be exported with top=True; all list objects that are embedded as fields in other objects should be exported with `top`=False. Returns: str: The object's string representation
def export(self, top=True): out = [] if top: out.append(self._internal_name) out.append(self._to_str(self.year)) out.append(self._to_str(self.month)) out.append(self._to_str(self.day)) out.append(self._to_str(self.hour)) out.append(self._to_str(self.minute)) out.append(self._to_str(self.data_source_and_uncertainty_flags)) out.append(self._to_str(self.dry_bulb_temperature)) out.append(self._to_str(self.dew_point_temperature)) out.append(self._to_str(self.relative_humidity)) out.append(self._to_str(self.atmospheric_station_pressure)) out.append(self._to_str(self.extraterrestrial_horizontal_radiation)) out.append(self._to_str(self.extraterrestrial_direct_normal_radiation)) out.append(self._to_str(self.horizontal_infrared_radiation_intensity)) out.append(self._to_str(self.global_horizontal_radiation)) out.append(self._to_str(self.direct_normal_radiation)) out.append(self._to_str(self.diffuse_horizontal_radiation)) out.append(self._to_str(self.global_horizontal_illuminance)) out.append(self._to_str(self.direct_normal_illuminance)) out.append(self._to_str(self.diffuse_horizontal_illuminance)) out.append(self._to_str(self.zenith_luminance)) out.append(self._to_str(self.wind_direction)) out.append(self._to_str(self.wind_speed)) out.append(self._to_str(self.total_sky_cover)) out.append(self._to_str(self.opaque_sky_cover)) out.append(self._to_str(self.visibility)) out.append(self._to_str(self.ceiling_height)) out.append(self._to_str(self.present_weather_observation)) out.append(self._to_str(self.present_weather_codes)) out.append(self._to_str(self.precipitable_water)) out.append(self._to_str(self.aerosol_optical_depth)) out.append(self._to_str(self.snow_depth)) out.append(self._to_str(self.days_since_last_snowfall)) out.append(self._to_str(self.albedo)) out.append(self._to_str(self.liquid_precipitation_depth)) out.append(self._to_str(self.liquid_precipitation_quantity)) return ",".join(out)
797,554
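For reference, export() simply joins the record's 35 fields with commas, yielding one hourly data line of an EPW weather file. A standalone sketch of the shape of such a line; the sample values are invented, and only the first nine of the 35 fields are shown:

```python
# Sketch of the comma-joined EPW data line produced by export(top=False).
# Field order: year, month, day, hour, minute, data source flags, dry bulb,
# dew point, relative humidity, ... (35 fields in total).
out = ['1987', '1', '1', '1', '0', '?9?9?9?9E0?9?9?9', '5.0', '2.0', '80']
print(','.join(out))  # -> 1987,1,1,1,0,?9?9?9?9E0?9?9?9,5.0,2.0,80
```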
Appends weather data. Args: data (WeatherData): weather data object
def add_weatherdata(self, data): if not isinstance(data, WeatherData): raise ValueError('Weather data needs to be of type WeatherData') self._data["WEATHER DATA"].append(data)
797,556
Save WeatherData in EPW format to path. Args: path (str): path where EPW file should be saved
def save(self, path, check=True): with open(path, 'w') as f: if check: if ("LOCATION" not in self._data or self._data["LOCATION"] is None): raise ValueError('location is not valid.') if ("DESIGN CONDITIONS" not in self._data or self._data["DESIGN CONDITIONS"] is None): raise ValueError('design_conditions is not valid.') if ("TYPICAL/EXTREME PERIODS" not in self._data or self._data["TYPICAL/EXTREME PERIODS"] is None): raise ValueError( 'typical_or_extreme_periods is not valid.') if ("GROUND TEMPERATURES" not in self._data or self._data["GROUND TEMPERATURES"] is None): raise ValueError('ground_temperatures is not valid.') if ("HOLIDAYS/DAYLIGHT SAVINGS" not in self._data or self._data["HOLIDAYS/DAYLIGHT SAVINGS"] is None): raise ValueError( 'holidays_or_daylight_savings is not valid.') if ("COMMENTS 1" not in self._data or self._data["COMMENTS 1"] is None): raise ValueError('comments_1 is not valid.') if ("COMMENTS 2" not in self._data or self._data["COMMENTS 2"] is None): raise ValueError('comments_2 is not valid.') if ("DATA PERIODS" not in self._data or self._data["DATA PERIODS"] is None): raise ValueError('data_periods is not valid.') if ("LOCATION" in self._data and self._data["LOCATION"] is not None): f.write(self._data["LOCATION"].export() + "\n") if ("DESIGN CONDITIONS" in self._data and self._data["DESIGN CONDITIONS"] is not None): f.write(self._data["DESIGN CONDITIONS"].export() + "\n") if ("TYPICAL/EXTREME PERIODS" in self._data and self._data["TYPICAL/EXTREME PERIODS"] is not None): f.write(self._data["TYPICAL/EXTREME PERIODS"].export() + "\n") if ("GROUND TEMPERATURES" in self._data and self._data["GROUND TEMPERATURES"] is not None): f.write(self._data["GROUND TEMPERATURES"].export() + "\n") if ("HOLIDAYS/DAYLIGHT SAVINGS" in self._data and self._data["HOLIDAYS/DAYLIGHT SAVINGS"] is not None): f.write( self._data["HOLIDAYS/DAYLIGHT SAVINGS"].export() + "\n") if ("COMMENTS 1" in self._data and self._data["COMMENTS 1"] is not None): f.write(self._data["COMMENTS 1"].export() + "\n") if ("COMMENTS 2" in self._data and self._data["COMMENTS 2"] is not None): f.write(self._data["COMMENTS 2"].export() + "\n") if ("DATA PERIODS" in self._data and self._data["DATA PERIODS"] is not None): f.write(self._data["DATA PERIODS"].export() + "\n") for item in self._data["WEATHER DATA"]: f.write(item.export(False) + "\n")
797,557
Creates an object depending on `internal_name` Args: internal_name (str): IDD name Raises: ValueError: if `internal_name` cannot be matched to a data dictionary object
def _create_datadict(cls, internal_name): if internal_name == "LOCATION": return Location() if internal_name == "DESIGN CONDITIONS": return DesignConditions() if internal_name == "TYPICAL/EXTREME PERIODS": return TypicalOrExtremePeriods() if internal_name == "GROUND TEMPERATURES": return GroundTemperatures() if internal_name == "HOLIDAYS/DAYLIGHT SAVINGS": return HolidaysOrDaylightSavings() if internal_name == "COMMENTS 1": return Comments1() if internal_name == "COMMENTS 2": return Comments2() if internal_name == "DATA PERIODS": return DataPeriods() raise ValueError( "No DataDictionary known for {}".format(internal_name))
797,558
Read EPW weather data from path. Args: path (str): path to read weather data from
def read(self, path): with open(path, "r") as f: for line in f: line = line.strip() match_obj_name = re.search(r"^([A-Z][A-Z/ \d]+),", line) if match_obj_name is not None: internal_name = match_obj_name.group(1) if internal_name in self._data: self._data[internal_name] = self._create_datadict( internal_name) data_line = line[len(internal_name) + 1:] vals = data_line.strip().split(',') self._data[internal_name].read(vals) else: wd = WeatherData() wd.read(line.strip().split(',')) self.add_weatherdata(wd)
797,559
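The header parsing hinges on the regular expression above: EPW header records begin with an all-caps object name (possibly containing spaces, digits, or a slash) followed by a comma, while hourly data rows begin with a year number and therefore fall through to the WeatherData branch. A small standalone check; the sample lines are invented:

```python
import re

header = "LOCATION,Denver,CO,USA,TMY2,724690,39.75,-104.87,-7.0,1610.0"
data_row = "1987,1,1,1,0,?9?9?9?9E0?9?9?9,5.0,2.0,80"

pattern = r"^([A-Z][A-Z/ \d]+),"
assert re.search(pattern, header).group(1) == "LOCATION"
assert re.search(pattern, data_row) is None  # data rows go to WeatherData
```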
Read values Args: vals (list): list of strings representing values
def read(self, vals): i = 0 {%- for field in fields %} {%- if field.is_list %} count = int(vals[i]) i += 1 for _ in range(count): obj = {{field.object_name}}() obj.read(vals[i:i + obj.field_count]) self.add_{{field.field_name}}(obj) i += obj.field_count {%- else %} if len(vals[i]) == 0: self.{{field.field_name}} = None else: self.{{field.field_name}} = vals[i] i += 1 {%- endif %} {%- endfor %}
797,648
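The read() body above is not plain Python but a Jinja2 template: the `{%- for field in fields %}` loop is expanded once per IDD field when the parser classes are generated. For a scalar field named `year`, the rendered code would look roughly like the following (my reconstruction of the template's output, not a verbatim excerpt):

```python
# Reconstructed rendering of the template for one scalar field, `year`:
if len(vals[i]) == 0:
    self.year = None
else:
    self.year = vals[i]
i += 1
```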
Repeatedly call fold and merge on data, then finalize. Arguments: reducer: The IReducer to use. data: Input for the fold function. chunk_size: How many items should be passed to fold at a time. Returns: Return value of finalize.
def reduce(reducer, data, chunk_size=DEFAULT_CHUNK_SIZE): if not chunk_size: return finalize(reducer, fold(reducer, data)) # Splitting the work up into chunks allows us to, e.g. reduce a large file # without loading everything into memory, while still being significantly # faster than repeatedly calling the fold function for every element. chunks = generate_chunks(data, chunk_size) intermediate = fold(reducer, next(chunks)) for chunk in chunks: intermediate = merge(reducer, intermediate, fold(reducer, chunk)) return finalize(reducer, intermediate)
797,872
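The reducer contract this relies on has three parts: fold turns a chunk of data into an intermediate value, merge combines two intermediates, and finalize converts the last intermediate into the result. A self-contained toy version with a "sum" reducer; this mirrors the shape of the code above, not EFILTER's actual API:

```python
def generate_chunks(data, chunk_size):
    """Yield successive chunk_size-length slices of data."""
    for i in range(0, len(data), chunk_size):
        yield data[i:i + chunk_size]

# Toy "sum" reducer: fold a chunk, merge intermediates, finalize the result.
fold = sum
merge = lambda a, b: a + b
finalize = lambda x: x

chunks = generate_chunks(list(range(100)), chunk_size=16)
intermediate = fold(next(chunks))
for chunk in chunks:
    intermediate = merge(intermediate, fold(chunk))
assert finalize(intermediate) == 4950
```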
Match grammar function 'f' against next token and set 'self.matched'. Arguments: f: A grammar function - see efilter.parsers.common.grammar. Must return TokenMatch or None. args: Passed to 'f', if any. Returns: Instance of efilter.parsers.common.grammar.TokenMatch or None. Comment: If a match is returned, it will also be stored in self.matched.
def match(self, f, *args): try: match = f(self.tokenizer, *args) except StopIteration: # The grammar function might have tried to access more tokens than # are available. That's not really an error, it just means it didn't # match. return if match is None: return if not isinstance(match, grammar.TokenMatch): raise TypeError("Invalid grammar function %r returned %r." % (f, match)) self.matched = match return match
797,992
Helper: solve 'expr' always returning a scalar (not IRepeated). If the output of 'expr' is a single value or a single RowTuple with a single column then return the value in that column. Otherwise raise. Arguments: expr: Expression to solve. vars: The scope. Returns: A scalar value (not an IRepeated). Raises: EfilterTypeError if it cannot get a scalar.
def __solve_for_scalar(expr, vars): var = solve(expr, vars).value try: scalar = repeated.getvalue(var) except TypeError: raise errors.EfilterTypeError( root=expr, query=expr.source, message="Wasn't expecting more than one value here. Got %r." % (var,)) if isinstance(scalar, row_tuple.RowTuple): try: return scalar.get_singleton() except ValueError: raise errors.EfilterTypeError( root=expr, query=expr.source, message="Was expecting a scalar value here. Got %r." % (scalar,)) else: return scalar
798,005
An expression is an atom or an infix expression. Grammar (sort of, actually a precedence-climbing parser): expression = atom [ binary_operator expression ] . Args: previous_precedence: What operator precedence should we start with?
def expression(self, previous_precedence=0): lhs = self.atom() return self.operator(lhs, previous_precedence)
798,155
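Precedence climbing works by parsing an atom for the left-hand side, then letting an operator loop consume operators as long as their precedence exceeds previous_precedence, recursing with a raised floor to bind right-hand operands. A self-contained miniature over a token list; the tokenizer and operator table are invented for illustration:

```python
# Minimal precedence-climbing evaluator over a token list (illustrative only).
PRECEDENCE = {'+': 1, '-': 1, '*': 2, '/': 2}
OPS = {'+': lambda a, b: a + b, '-': lambda a, b: a - b,
       '*': lambda a, b: a * b, '/': lambda a, b: a / b}

def expression(tokens, previous_precedence=0):
    lhs = tokens.pop(0)  # the atom: assume a bare number
    while tokens and PRECEDENCE.get(tokens[0], 0) > previous_precedence:
        op = tokens.pop(0)
        rhs = expression(tokens, PRECEDENCE[op])  # climb for the right side
        lhs = OPS[op](lhs, rhs)
    return lhs

assert expression([2, '+', 3, '*', 4]) == 14   # * binds tighter than +
assert expression([8, '-', 3, '-', 2]) == 3    # equal precedence stays left-assoc
```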
Automatically generate implementations for a type. Implement the protocol for the 'for_type' type by dispatching each member function of the protocol to an instance method of the same name declared on the type 'for_type'. Arguments: for_type: The type to implicitly implement the protocol with. Raises: TypeError if not all implementations are provided by 'for_type'.
def implicit_static(cls, for_type=None, for_types=None): for type_ in cls.__get_type_args(for_type, for_types): implementations = {} for function in cls.required(): method = getattr(type_, function.__name__, None) if not callable(method): raise TypeError( "%s.implicit invocation on type %r is missing instance " "method %r." % (cls.__name__, type_, function.__name__)) implementations[function] = method for function in cls.optional(): method = getattr(type_, function.__name__, None) if callable(method): implementations[function] = method return cls.implement(for_type=type_, implementations=implementations)
798,222
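The effect is duck-typed protocol registration: any type whose instance methods match the protocol's required function names can be registered without explicit glue code. A toy model of the same dispatch-by-name idea; none of this is EFILTER's real API:

```python
# A protocol declares required functions; registration wires each to the
# instance method of the same name on the registered type.
class Renderable(object):
    @staticmethod
    def required():
        return [render]

def render(obj):
    """Protocol function: dispatched to the registered type's method."""
    return type(obj).__protocol_impl__[render](obj)

def implicit_static(protocol, for_type):
    impls = {}
    for function in protocol.required():
        method = getattr(for_type, function.__name__, None)
        if not callable(method):
            raise TypeError("%r is missing instance method %r"
                            % (for_type, function.__name__))
        impls[function] = method
    for_type.__protocol_impl__ = impls

class Point(object):
    def render(self):
        return "Point()"

implicit_static(Renderable, Point)
assert render(Point()) == "Point()"
```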
Parse one of the rules as either objectfilter or dottysql. Example: _parse_query("5 + 5") # Returns Sum(Literal(5), Literal(5)) Arguments: source: A rule in either objectfilter or dottysql syntax. Returns: The AST to represent the rule.
def _parse_query(self, source): if self.OBJECTFILTER_WORDS.search(source): syntax_ = "objectfilter" else: syntax_ = None # Default it is. return query.Query(source, syntax=syntax_)
798,233
Create a syntax parser for this dialect. Arguments: original: The source code of this query. Most often this is a string type, but there are exceptions (e.g. lisp) params: Some dialects support parametric queries (for safety) - if used, pass them as params. This should be a dict for keywords or a tuple for positional.
def __init__(self, original, params=None): super(Syntax, self).__init__() self.params = params self.original = original
798,264
Generates a version string. Arguments: dev_version: Generate a verbose development version from git commits. Examples: 1.1 1.1.dev43 # If 'dev_version' was passed.
def get_version(dev_version=False): if dev_version: version = git_dev_version() if not version: raise RuntimeError("Could not generate dev version from git.") return version return "1!%d.%d" % (MAJOR, MINOR)
798,301
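The "1!%d.%d" format prepends a PEP 440 epoch: the "1!" prefix makes every such release sort after versions issued under any earlier numbering scheme. For example:

```python
# With MAJOR, MINOR = 1, 5 the release string is "1!1.5"; the "1!" epoch
# outranks any pre-epoch version such as "2016.4" in PEP 440 ordering.
MAJOR, MINOR = 1, 5
print("1!%d.%d" % (MAJOR, MINOR))  # -> 1!1.5
```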
Registers an implementing function for for_type. Arguments: implementation: Callable implementation for this type. for_type: The type this implementation applies to. for_types: Same as for_type, but takes a tuple of types. for_type and for_types cannot both be passed (for obvious reasons.) Raises: ValueError
def implement(self, implementation, for_type=None, for_types=None): unbound_implementation = self.__get_unbound_function(implementation) for_types = self.__get_types(for_type, for_types) for t in for_types: self._write_lock.acquire() try: self.implementations.append((t, unbound_implementation)) finally: self._write_lock.release()
798,326
Add the pattern to a screen. Also fills self.widgets. Args: screen_width (int): the width of the screen screen (lcdprod.Screen): the screen to fill.
def add_to_screen(self, screen_width, screen): for lineno, fields in enumerate(self.line_fields): for left, field in self.compute_positions(screen_width, fields): logger.debug( "Adding field %s to screen %s at x=%d->%d, y=%d", field, screen.ref, left, left + field.width - 1, 1 + lineno, ) self.widgets[field] = field.add_to_screen(screen, left, 1 + lineno) self.register_hooks(field)
798,969
Add a pattern to the list. Args: pattern_txt (str list): the pattern, as a list of lines.
def add(self, pattern_txt): self.patterns[len(pattern_txt)] = pattern_txt low = 0 high = len(pattern_txt) - 1 while not pattern_txt[low]: low += 1 while not pattern_txt[high]: high -= 1 min_pattern = pattern_txt[low:high + 1] self.min_patterns[len(min_pattern)] = min_pattern
798,974
Retrieve the best pattern for a given size. The algorithm is: - If a pattern is registered for the size, use it - Otherwise, find the longest registered pattern shorter than the size, add some blank lines before it, and return it - If no shorter pattern exists, return a blank pattern. Args: key (int): the target size Returns: ScreenPattern: the best pattern available for that size
def __getitem__(self, key): if key in self.patterns: return ScreenPattern(self.patterns[key], self.field_registry) for shorter in range(key, 0, -1): if shorter in self.min_patterns: pattern = self.min_patterns[shorter] # Try to vertically center the pattern prefix = [''] * ((key - shorter) // 2) return ScreenPattern(prefix + pattern, self.field_registry) return ScreenPattern([], self.field_registry)
798,975
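With the centering arithmetic fixed to (key - shorter) // 2, the fallback path pads a shorter pattern with blank lines before returning it. A standalone trace of that branch, with plain lists standing in for the ScreenPattern wrapper and invented pattern text:

```python
min_patterns = {2: ['{artist}', '{title}']}  # tallest registered: 2 lines
key = 4                                      # screen height requested
shorter = 2                                  # first hit counting down from key
prefix = [''] * ((key - shorter) // 2)       # one blank line on top
assert prefix + min_patterns[shorter] == ['', '{artist}', '{title}']
```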
Convert a '[user[:pass]@]host:port' string to a Connection tuple. If the given connection is empty, use defaults. If no port is given, use the default. Args: conn (str): the string describing the target host/port default_host (str): the host to use if ``conn`` is empty default_port (int): the port to use if not given in ``conn`` default_user (str): the username to use if not given in ``conn`` default_password (str): the password to use if not given in ``conn`` Returns: Connection: a (host, port, user, password) tuple.
def _make_hostport(conn, default_host, default_port, default_user='', default_password=None): parsed = urllib.parse.urlparse('//%s' % conn) return Connection( parsed.hostname or default_host, parsed.port or default_port, parsed.username if parsed.username is not None else default_user, parsed.password if parsed.password is not None else default_password, )
799,122
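The trick here is that prefixing '//' makes urlparse treat the whole string as a network location, so hostname, port, username, and password all come back parsed. A standalone version; the Connection namedtuple and the default port are stand-ins, and mpdlcd's actual defaults may differ:

```python
import urllib.parse
from collections import namedtuple

Connection = namedtuple('Connection', 'host port user password')

def make_hostport(conn, default_host='localhost', default_port=6600):
    # '//' forces urlparse to read conn as a netloc, not a path.
    parsed = urllib.parse.urlparse('//%s' % conn)
    return Connection(parsed.hostname or default_host,
                      parsed.port or default_port,
                      parsed.username or '',
                      parsed.password)

assert make_hostport('example.com:1234') == Connection('example.com', 1234, '', None)
assert make_hostport('') == Connection('localhost', 6600, '', None)
```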
Create a ScreenPatternList from a given pattern text. Args: pattern_txt (str list): the patterns Returns: mpdlcd.display_pattern.ScreenPatternList: a list of patterns from the given entries.
def _make_patterns(patterns): field_registry = display_fields.FieldRegistry() pattern_list = display_pattern.ScreenPatternList( field_registry=field_registry, ) for pattern in patterns: pattern_list.add(pattern.split('\n')) return pattern_list
799,124
Extract options values from a configparser, optparse pair. Options given on command line take precedence over options read in the configuration file. Args: config (dict): option values read from a config file through configparser options (optparse.Options): optparse 'options' object containing options values from the command line *args (str tuple): name of the options to extract
def _extract_options(config, options, *args): extract = {} for key in args: if key not in config: continue extract[key] = config[key] option = getattr(options, key, None) if option is not None: extract[key] = option return extract
799,129
Receive an answer from the daemon and return the response. Args: socket (socket.socket): A socket that is connected to the daemon. Returns: dict or str: The unpickled answer.
def receive_data(socket): answer = b"" while True: packet = socket.recv(4096) if not packet: break answer += packet response = pickle.loads(answer) socket.close() return response
800,720
Connect to a daemon's socket. Args: root_dir (str): The directory that used as root by the daemon. Returns: socket.socket: A socket that is connected to the daemon.
def connect_socket(root_dir): # Get config directory where the daemon socket is located config_dir = os.path.join(root_dir, '.config/pueue') # Connect to the socket and exit with 1 if the connection fails try: client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) socket_path = os.path.join(config_dir, 'pueue.sock') if os.path.exists(socket_path): client.connect(socket_path) else: print("Socket doesn't exist") raise Exception except Exception: print("Error connecting to socket. Make sure the daemon is running") sys.exit(1) return client
800,721
Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket.
def create_socket(self): socket_path = os.path.join(self.config_dir, 'pueue.sock') # Create Socket and exit with 1, if socket can't be created try: if os.path.exists(socket_path): os.remove(socket_path) self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.bind(socket_path) self.socket.setblocking(0) self.socket.listen(0) # Set file permissions os.chmod(socket_path, stat.S_IRWXU) except Exception: self.logger.error("Daemon couldn't create socket. Aborting") self.logger.exception("Socket creation failed") sys.exit(1) return self.socket
801,186
Add a new command to the daemon queue. Args: args['command'] (list(str)): The actual program call. Something like ['ls', '-a'] or ['ls -al'] root_dir (string): The path to the root directory the daemon is running in.
def execute_add(args, root_dir=None): # We accept a list of strings. # This is done to create a better commandline experience with argparse. command = ' '.join(args['command']) # Send new instruction to daemon instruction = { 'command': command, 'path': os.getcwd() } print_command_factory('add')(instruction, root_dir)
801,230
Edit an existing queue command in the daemon. Args: args['key'] (int): The key of the queue entry to be edited root_dir (string): The path to the root directory the daemon is running in.
def execute_edit(args, root_dir=None): # Get editor EDITOR = os.environ.get('EDITOR', 'vim') # Get command from server key = args['key'] status = command_factory('status')({}, root_dir=root_dir) # Check if queue is not empty, the entry exists and is queued or stashed if not isinstance(status['data'], str) and key in status['data']: if status['data'][key]['status'] in ['queued', 'stashed']: command = status['data'][key]['command'] else: print("Entry is not 'queued' or 'stashed'") sys.exit(1) else: print('No entry with this key') sys.exit(1) with tempfile.NamedTemporaryFile(suffix=".tmp") as tf: tf.write(command.encode('utf-8')) tf.flush() call([EDITOR, tf.name]) # do the parsing with `tf` using regular File operations. # for instance: tf.seek(0) edited_command = tf.read().decode('utf-8') print_command_factory('edit')({ 'key': key, 'command': edited_command, }, root_dir=root_dir)
801,231
A factory which returns functions for direct daemon communication. This factory will create a function which sends a payload to the daemon and returns the unpickled object which is returned by the daemon. Args: command (string): The type of payload this should be. This determines how the instruction will be interpreted by the daemon. Returns: function: The created function.
def command_factory(command): def communicate(body=None, root_dir=None): # Avoid a shared mutable default: the body dict is mutated below. body = body if body is not None else {} client = connect_socket(root_dir) body['mode'] = command # Delete the func entry we use to call the correct function with argparse # as functions can't be pickled and this shouldn't be sent to the daemon. if 'func' in body: del body['func'] data_string = pickle.dumps(body, -1) client.send(data_string) # Receive message, unpickle and return it response = receive_data(client) return response return communicate
801,232
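On the wire, client and daemon exchange pickled dictionaries; the 'mode' key added by the factory tells the daemon which handler to run. A sketch of the payload round-trip, with key names mirroring execute_add above and invented values:

```python
import pickle

body = {'command': 'ls -al', 'path': '/tmp', 'mode': 'add'}
data = pickle.dumps(body, -1)            # what client.send() transmits
assert pickle.loads(data)['mode'] == 'add'
```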
Print the status of the daemon. This function displays the current status of the daemon as well as the whole queue and all available information about every entry in the queue. `terminaltables` is used to format and display the queue contents. `colorclass` is used to color format the various items in the queue. Args: root_dir (string): The path to the root directory the daemon is running in.
def execute_status(args, root_dir=None): status = command_factory('status')({}, root_dir=root_dir) # First rows, showing daemon status if status['status'] == 'running': status['status'] = Color('{autogreen}' + '{}'.format(status['status']) + '{/autogreen}') elif status['status'] in ['paused']: status['status'] = Color('{autoyellow}' + '{}'.format(status['status']) + '{/autoyellow}') print('Daemon: {}\n'.format(status['status'])) # Handle queue data data = status['data'] if isinstance(data, str): print(data) elif isinstance(data, dict): # Format incoming data to be compatible with Terminaltables formatted_data = [] formatted_data.append(['Index', 'Status', 'Code', 'Command', 'Path', 'Start', 'End']) for key, entry in sorted(data.items(), key=operator.itemgetter(0)): formatted_data.append( [ '#{}'.format(key), entry['status'], '{}'.format(entry['returncode']), entry['command'], entry['path'], entry['start'], entry['end'] ] ) # Create AsciiTable instance and define style table = AsciiTable(formatted_data) table.outer_border = False table.inner_column_border = False terminal_width = terminal_size() customWidth = table.column_widths # If the text is wider than the actual terminal size, we # compute a new size for the Command and Path column. if (reduce(lambda a, b: a+b, table.column_widths) + 10) > terminal_width[0]: # We have to subtract 14 because of table paddings left_space = math.floor((terminal_width[0] - customWidth[0] - customWidth[1] - customWidth[2] - customWidth[5] - customWidth[6] - 14)/2) if customWidth[3] < left_space: customWidth[4] = 2*left_space - customWidth[3] elif customWidth[4] < left_space: customWidth[3] = 2*left_space - customWidth[4] else: customWidth[3] = left_space customWidth[4] = left_space # Format long strings to match the console width for i, entry in enumerate(table.table_data): for j, string in enumerate(entry): max_width = customWidth[j] wrapped_string = '\n'.join(wrap(string, max_width)) if j == 1: if wrapped_string == 'done' or wrapped_string == 'running' or wrapped_string == 'paused': wrapped_string = Color('{autogreen}' + '{}'.format(wrapped_string) + '{/autogreen}') elif wrapped_string in ['queued', 'stashed']: wrapped_string = Color('{autoyellow}' + '{}'.format(wrapped_string) + '{/autoyellow}') elif wrapped_string in ['failed', 'stopping', 'killing']: wrapped_string = Color('{autored}' + '{}'.format(wrapped_string) + '{/autored}') elif j == 2: if wrapped_string == '0' and wrapped_string != 'Code': wrapped_string = Color('{autogreen}' + '{}'.format(wrapped_string) + '{/autogreen}') elif wrapped_string != '0' and wrapped_string != 'Code': wrapped_string = Color('{autored}' + '{}'.format(wrapped_string) + '{/autored}') table.table_data[i][j] = wrapped_string print(table.table) print('')
801,314
Print the current log file. Args: args['keys'] (int): If given, we only look at the specified processes. root_dir (string): The path to the root directory the daemon is running in.
def execute_log(args, root_dir): # Print the logs of all specified processes if args.get('keys'): config_dir = os.path.join(root_dir, '.config/pueue') queue_path = os.path.join(config_dir, 'queue') if os.path.exists(queue_path): queue_file = open(queue_path, 'rb') try: queue = pickle.load(queue_file) except Exception: print('Queue log file seems to be corrupted. Aborting.') return queue_file.close() else: print('There is no queue log file. Aborting.') return for key in args.get('keys'): # Check if there is an entry with this key if queue.get(key) and queue[key]['status'] in ['failed', 'done']: entry = queue[key] print('Log of entry: {}'.format(key)) print('Returncode: {}'.format(entry['returncode'])) print('Command: {}'.format(entry['command'])) print('Path: {}'.format(entry['path'])) print('Start: {}, End: {} \n'.format(entry['start'], entry['end'])) # Write STDERR if len(entry['stderr']) > 0: print(Color('{autored}Stderr output: {/autored}\n ') + entry['stderr']) # Write STDOUT if len(entry['stdout']) > 0: print(Color('{autogreen}Stdout output: {/autogreen}\n ') + entry['stdout']) else: print('No finished process with key {}.'.format(key)) # Print the log of all processes else: log_path = os.path.join(root_dir, '.local/share/pueue/queue.log') log_file = open(log_path, 'r') print(log_file.read())
801,315
Print stderr and stdout of the current running process. Args: args['watch'] (bool): If True, we open a curses session and tail the output live in the console. root_dir (string): The path to the root directory the daemon is running in.
def execute_show(args, root_dir): key = None if args.get('key'): key = args['key'] status = command_factory('status')({}, root_dir=root_dir) if key not in status['data'] or status['data'][key]['status'] != 'running': print('No running process with this key, use `log` to show finished processes.') return # In case no key provided, we take the oldest running process else: status = command_factory('status')({}, root_dir=root_dir) if isinstance(status['data'], str): print(status['data']) return for k in sorted(status['data'].keys()): if status['data'][k]['status'] == 'running': key = k break if key is None: print('No running process, use `log` to show finished processes.') return config_dir = os.path.join(root_dir, '.config/pueue') # Get the current process's stdout and stderr files from the config directory stdoutFile = os.path.join(config_dir, 'pueue_process_{}.stdout'.format(key)) stderrFile = os.path.join(config_dir, 'pueue_process_{}.stderr'.format(key)) stdoutDescriptor = open(stdoutFile, 'r') stderrDescriptor = open(stderrFile, 'r') running = True # Continually print output with curses or just print once if args['watch']: # Initialize curses stdscr = curses.initscr() curses.noecho() curses.cbreak() curses.curs_set(2) stdscr.keypad(True) stdscr.refresh() try: # Update output every two seconds while running: stdscr.clear() stdoutDescriptor.seek(0) message = stdoutDescriptor.read() stdscr.addstr(0, 0, message) stdscr.refresh() time.sleep(2) except Exception: # Curses cleanup curses.nocbreak() stdscr.keypad(False) curses.echo() curses.endwin() else: print('Stdout output:\n') stdoutDescriptor.seek(0) print(get_descriptor_output(stdoutDescriptor, key)) print('\n\nStderr output:\n') stderrDescriptor.seek(0) print(get_descriptor_output(stderrDescriptor, key))
801,316
Generate output key material based on an `info` value Arguments: - info - context to generate the OKM - length - length in bytes of the key to generate See RFC 5869 (HKDF) for guidance.
def expand(self, info=b"", length=32): return hkdf_expand(self._prk, info, length, self._hash)
801,401
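expand() delegates to the module-level hkdf_expand, which implements the RFC 5869 expand step: the pseudorandom key is stretched to the requested length by chaining HMAC blocks over the info string and a counter. A self-contained sketch of that step using only the standard library; equivalent in behavior per RFC 5869, but not the module's own code:

```python
import hashlib
import hmac

def hkdf_expand(prk, info=b"", length=32, hash=hashlib.sha256):
    """RFC 5869 expand: T(i) = HMAC(prk, T(i-1) | info | i); OKM = T1|T2|..."""
    blocks = -(-length // hash().digest_size)  # ceiling division
    okm, t = b"", b""
    for i in range(1, blocks + 1):
        t = hmac.new(prk, t + info + bytes([i]), hash).digest()
        okm += t
    return okm[:length]

# prk would normally come from the HKDF extract step.
prk = hmac.new(b"salt", b"input key material", hashlib.sha256).digest()
assert len(hkdf_expand(prk, info=b"my-app v1", length=42)) == 42
```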
Return the server's IP address. Params: - addr_family: IPv4, IPv6 or None. None prefers IPv4 but will return IPv6 if IPv4 addr was not available. - access: 'public' or 'private'
def get_ip(self, access='public', addr_family=None, strict=None): if addr_family not in ['IPv4', 'IPv6', None]: raise Exception("`addr_family` must be 'IPv4', 'IPv6' or None") if access not in ['private', 'public']: raise Exception("`access` must be 'public' or 'private'") if not hasattr(self, 'ip_addresses'): self.populate() # server can have several public or private IPs ip_addrs = [ ip_addr for ip_addr in self.ip_addresses if ip_addr.access == access ] # prefer addr_family (or IPv4 if none given) preferred_family = addr_family if addr_family else 'IPv4' for ip_addr in ip_addrs: if ip_addr.family == preferred_family: return ip_addr.address # any IP (of the right access) will do if available and addr_family is None return ip_addrs[0].address if ip_addrs and not addr_family else None
801,438
Convert an epoch date to a tuple in format ("yyyy-mm-dd","hh:mm:ss") Example: "1023456427" -> ("2002-06-07","15:27:07") Parameters: - `timestamp`: date in epoch format
def epochdate(timestamp): dt = datetime.fromtimestamp(float(timestamp)).timetuple() fecha = "{0:d}-{1:02d}-{2:02d}".format(dt.tm_year, dt.tm_mon, dt.tm_mday) hora = "{0:02d}:{1:02d}:{2:02d}".format(dt.tm_hour, dt.tm_min, dt.tm_sec) return (fecha, hora)
802,180
Adds the given value to the item (list). Args: val: A JSON serializable object. Returns: Cache backend response.
def add(self, val): return cache.lpush(self.key, json.dumps(val) if self.serialize else val)
802,525
Removes given item from the list. Args: val: Item Returns: Cache backend response.
def remove_item(self, val): return cache.lrem(self.key, json.dumps(val) if self.serialize else val)
802,527
Removes all keys of this namespace. Without args, clears all keys starting with cls.PREFIX; if called with args, clears keys starting with the given cls.PREFIX + args. Args: *args: Arbitrary number of arguments. Returns: List of removed keys.
def flush(cls, *args): return _remove_keys([], [(cls._make_key(args) if args else cls.PREFIX) + '*'])
802,528
Sends a message to possible owners of the current workflow's next lane. Args: **kwargs: ``current`` and ``possible_owners`` are required. sender (User): User object
def send_message_for_lane_change(sender, **kwargs): current = kwargs['current'] owners = kwargs['possible_owners'] if 'lane_change_invite' in current.task_data: msg_context = current.task_data.pop('lane_change_invite') else: msg_context = DEFAULT_LANE_CHANGE_INVITE_MSG wfi = WFCache(current).get_instance() # Deletion of used passive task invitation which belongs to previous lane. TaskInvitation.objects.filter(instance=wfi, role=current.role, wf_name=wfi.wf.name).delete() today = datetime.today() for recipient in owners: inv = TaskInvitation( instance=wfi, role=recipient, wf_name=wfi.wf.name, progress=30, start_date=today, finish_date=today + timedelta(15) ) inv.title = current.task_data.get('INVITATION_TITLE') or wfi.wf.title inv.save() # try to send notification, if it fails go on try: recipient.send_notification(title=msg_context['title'], message="%s %s" % (wfi.wf.title, msg_context['body']), typ=1, # info url='', sender=sender ) except: # todo: specify which exception pass
802,540
While splitting a channel and moving chosen subscribers to a new channel, the old channel's messages are copied and moved to the new channel. Args: from_channel (Channel object): channel to move messages from to_channel (Channel object): channel to move messages to
def copy_and_move_messages(from_channel, to_channel): with BlockSave(Message, query_dict={'channel_id': to_channel.key}): for message in Message.objects.filter(channel=from_channel, typ=15): message.key = '' message.channel = to_channel message.save()
802,552
Shows a message box with an error or success message for the operation. Args: title (string): title of message box box_type (string): type of message box (warning, info)
def show_warning_messages(self, title=_(u"Incorrect Operation"), box_type='warning'): msg = self.current.task_data['msg'] self.current.output['msgbox'] = {'type': box_type, "title": title, "msg": msg} del self.current.task_data['msg']
802,553
Returns the chosen keys and names from a given form. Args: form_info: serialized list of dict form data Returns: selected_keys (list): chosen keys selected_names (list): chosen channels' or subscribers' names
def return_selected_form_items(form_info): selected_keys = [] selected_names = [] for chosen in form_info: if chosen['choice']: selected_keys.append(chosen['key']) selected_names.append(chosen['name']) return selected_keys, selected_names
802,554
Validates the selection from the form against the rules of the chosen operation, returning an error message if it does not comply. Args: form_info: Channel or subscriber form from the user Returns: True or False, and an error message
def selection_error_control(self, form_info): keys, names = self.return_selected_form_items(form_info['ChannelList']) chosen_channels_number = len(keys) if form_info['new_channel'] and chosen_channels_number < 2: return False, _( u"You should choose at least two channels to merge into a new channel.") elif form_info['existing_channel'] and chosen_channels_number == 0: return False, _( u"You should choose at least one channel to merge into an existing channel.") elif form_info['find_chosen_channel'] and chosen_channels_number != 1: return False, _(u"You should choose one channel for the split operation.") return True, None
802,555
Writes user data to session. Args: user: User object
def set_user(self, user): self.session['user_id'] = user.key self.session['user_data'] = user.clean_value() role = self.get_role() # TODO: this should be remembered from previous login # self.session['role_data'] = default_role.clean_value() self.session['role_id'] = role.key self.current.role_id = role.key self.current.user_id = user.key # self.perm_cache = PermissionCache(role.key) self.session['permissions'] = role.get_permissions()
802,596
Initializes the workflow with given request, response objects and diagram name. Args: session: Session object of the current request. input: Input data of the current request. workflow_name (str): Name of workflow diagram without ".bpmn" suffix. File must be placed under one of configured :py:attr:`~zengine.settings.WORKFLOW_PACKAGES_PATHS`
def start_engine(self, **kwargs): self.current = WFCurrent(**kwargs) self.wf_state = {'in_external': False, 'finished': False} if not self.current.new_token: self.wf_state = self.current.wf_cache.get(self.wf_state) self.current.workflow_name = self.wf_state['name'] # if we have a pre-selected object to work with, # inserting it as current.input['id'] and task_data['object_id'] if 'subject' in self.wf_state: self.current.input['id'] = self.wf_state['subject'] self.current.task_data['object_id'] = self.wf_state['subject'] self.check_for_authentication() self.check_for_permission() self.workflow = self.load_or_create_workflow() # if form data exists in input (user submitted) # put form data in wf task_data if 'form' in self.current.input: form = self.current.input['form'] if 'form_name' in form: self.current.task_data[form['form_name']] = form # in wf diagram, if property is stated as init = True # demanded initial values are assigned and put to cache start_init_values = self.workflow_spec.wf_properties.get('init', 'False') == 'True' if start_init_values: WFInit = get_object_from_path(settings.WF_INITIAL_VALUES)() WFInit.assign_wf_initial_values(self.current) log_msg = ("\n\n::::::::::: ENGINE STARTED :::::::::::\n" "\tWF: %s (Possible) TASK:%s\n" "\tCMD:%s\n" "\tSUBCMD:%s" % ( self.workflow.name, self.workflow.get_tasks(Task.READY), self.current.input.get('cmd'), self.current.input.get('subcmd'))) log.debug(log_msg) sys._zops_wf_state_log = log_msg self.current.workflow = self.workflow
802,697
Imports the module that contains the referenced method. Args: path: python path of class/function look_for_cls_method (bool): If True, treat the last part of path as class method. Returns: Tuple. (class object, class name, method to be called)
def _import_object(self, path, look_for_cls_method): last_nth = 2 if look_for_cls_method else 1 path = path.split('.') module_path = '.'.join(path[:-last_nth]) class_name = path[-last_nth] module = importlib.import_module(module_path) class_method = path[-1] if look_for_cls_method else None return getattr(module, class_name), class_name, class_method
802,710
Creates some aliases for attributes of ``current``. Args: current: :attr:`~zengine.engine.WFCurrent` object.
def set_current(self, current): self.current = current self.input = current.input # self.req = current.request # self.resp = current.response self.output = current.output self.cmd = current.task_data['cmd'] if self.cmd and NEXT_CMD_SPLITTER in self.cmd: self.cmd, self.next_cmd = self.cmd.split(NEXT_CMD_SPLITTER) else: self.next_cmd = None
802,744
Renders form. Applies form modifiers, then writes result to response payload. If supplied, given form object instance will be used instead of view's default ObjectForm. Args: _form (:py:attr:`~zengine.forms.json_form.JsonForm`): Form object to override `self.object_form`
def form_out(self, _form=None): _form = _form or self.object_form self.output['forms'] = _form.serialize() self._add_meta_props(_form) self.output['forms']['grouping'] = _form.Meta.grouping self.output['forms']['constraints'] = _form.Meta.constraints self._patch_form(self.output['forms']) self.set_client_cmd('form')
802,746
Adds given cmd(s) to ``self.output['client_cmd']`` Args: *args: Client commands.
def set_client_cmd(self, *args): self.client_cmd.update(args) self.output['client_cmd'] = list(self.client_cmd)
802,747
Load XML from given path. Args: path: diagram path Returns: A list containing a single (workflow name, XML content) tuple.
def get_wf_from_path(self, path): with open(path) as fp: content = fp.read() return [(os.path.basename(os.path.splitext(path)[0]), content), ]
802,774
Handles pagination of object listings. Args: current_page (int): Current page number query_set (:class:`QuerySet<pyoko:pyoko.db.queryset.QuerySet>`): Object listing queryset. per_page (int): Objects per page. Returns: QuerySet object and pagination data dict as a tuple
def _paginate(self, current_page, query_set, per_page=10): total_objects = query_set.count() total_pages = int(total_objects / per_page or 1) # add orphans to last page current_per_page = per_page + ( total_objects % per_page if current_page == total_pages else 0) pagination_data = dict(page=current_page, total_pages=total_pages, total_objects=total_objects, per_page=current_per_page) query_set = query_set.set_params(rows=current_per_page, start=(current_page - 1) * per_page) return query_set, pagination_data
802,789
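The orphan handling means the remainder rows are appended to the last page rather than spilling onto an extra page: with 23 objects at 10 per page, total_pages is int(2.3) = 2 and page 2 serves 13 rows. A standalone check of the arithmetic, with no pyoko dependency:

```python
# Worked example of the orphan arithmetic used above.
total_objects, per_page, current_page = 23, 10, 2
total_pages = int(total_objects / per_page or 1)          # -> 2
current_per_page = per_page + (
    total_objects % per_page if current_page == total_pages else 0)
start = (current_page - 1) * per_page                     # -> 10
assert (total_pages, current_per_page, start) == (2, 13, 10)
```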
Creates a direct messaging channel between two users. Args: initiator_key: Key of the user who wants to make first contact receiver_key: Key of the other party Returns: (Channel, receiver_name)
def get_or_create_direct_channel(cls, initiator_key, receiver_key): existing = cls.objects.OR().filter( code_name='%s_%s' % (initiator_key, receiver_key)).filter( code_name='%s_%s' % (receiver_key, initiator_key)) receiver_name = UserModel.objects.get(receiver_key).full_name if existing: channel = existing[0] else: channel_name = '%s_%s' % (initiator_key, receiver_key) channel = cls(is_direct=True, code_name=channel_name, typ=10).blocking_save() with BlockSave(Subscriber): Subscriber.objects.get_or_create(channel=channel, user_id=initiator_key, name=receiver_name) Subscriber.objects.get_or_create(channel=channel, user_id=receiver_key, name=UserModel.objects.get(initiator_key).full_name) return channel, receiver_name
802,815
Serializes message for given user. Note: Should be called before first save(). Otherwise "is_update" will get the wrong value. Args: user: User object Returns: Dict. JSON serialization ready dictionary object
def serialize(self, user=None): return { 'content': self.body, 'type': self.typ, 'updated_at': self.updated_at, 'timestamp': self.updated_at, 'is_update': not hasattr(self, 'unsaved'), 'attachments': [attachment.serialize() for attachment in self.attachment_set], 'title': self.msg_title, 'url': self.url, 'sender_name': self.sender.full_name, 'sender_key': self.sender.key, 'channel_key': self.channel.key, 'cmd': 'message', 'avatar_url': self.sender.avatar, 'key': self.key, }
802,833
Encrypts the given password and sets it as the user's password. Args: raw_password (str)
def set_password(self, raw_password): self.password = pbkdf2_sha512.encrypt(raw_password, rounds=10000, salt_size=10)
802,894
Sends a message to the user's private MQ exchange. Args: title: Message title. message: Message body. sender: Sender User object. url: Related URL (if any). typ: Message type.
def send_notification(self, title, message, typ=1, url=None, sender=None): self.created_channels.channel.add_message( channel_key=self.prv_exchange, body=message, title=title, typ=typ, url=url, sender=sender, receiver=self )
802,900
Sends arbitrary cmd and data to the client. If a queue name is passed via the "via_queue" parameter, that queue will be used instead of the user's private exchange. Args: data: dict cmd: string via_queue: queue name
def send_client_cmd(self, data, cmd=None, via_queue=None): mq_channel = self._connect_mq() if cmd: data['cmd'] = cmd if via_queue: mq_channel.basic_publish(exchange='', routing_key=via_queue, body=json.dumps(data)) else: mq_channel.basic_publish(exchange=self.prv_exchange, routing_key='', body=json.dumps(data))
802,901
Enables defining an abstract Role Permission object. Args: perm (object): Permission object
def add_permission(self, perm): self.Permissions(permission=perm) PermissionCache.flush() self.save()
802,983
Adds a permission with given name. Args: code (str): Code name of the permission. save (bool): If False, only returns the matching permissions as "name | code" strings without adding or saving anything.
def add_permission_by_name(self, code, save=False): if not save: return ["%s | %s" % (p.name, p.code) for p in Permission.objects.filter(code__contains=code)] p = None for p in Permission.objects.filter(code__contains=code): if p not in self.Permissions: self.Permissions(permission=p) if p: self.save()
802,985
Disconnect receiver from sender for signal. If weak references are used, disconnect need not be called. The receiver will be removed from dispatch automatically. Arguments: receiver The registered receiver to disconnect. May be None if dispatch_uid is specified. sender The registered sender to disconnect dispatch_uid the unique identifier of the receiver to disconnect
def disconnect(self, receiver=None, sender=None, dispatch_uid=None):
    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))

    disconnected = False
    with self.lock:
        self._clear_dead_receivers()
        for index in range(len(self.receivers)):
            (r_key, _) = self.receivers[index]
            if r_key == lookup_key:
                disconnected = True
                del self.receivers[index]
                break
        self.sender_receivers_cache.clear()
    return disconnected
802,996
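A hedged usage sketch of the dispatch_uid path. The import path is an assumption (this dispatcher is a vendored copy, so the module location may differ in your checkout):

from zengine.dispatch.dispatcher import Signal

post_save = Signal(providing_args=['instance'])

def audit(sender, instance, **kwargs):
    print('saved', instance)

post_save.connect(audit, dispatch_uid='audit-hook')
assert post_save.disconnect(dispatch_uid='audit-hook')
assert not post_save.disconnect(dispatch_uid='audit-hook')  # already removed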
Prepares the form's links by mimicking the result of pyoko's get_links method Args: **kw: Returns: list of link dicts
def get_links(self, **kw):
    links = [a for a in dir(self)
             if isinstance(getattr(self, a), Model) and not a.startswith('_model')]
    return [
        {
            'field': l,
            'mdl': getattr(self, l).__class__,
        }
        for l in links
    ]
803,068
Fills the form with data Args: data (dict): Data to assign to form fields. Returns: Self. Form object.
def set_data(self, data):
    for name in self._fields:
        setattr(self, name, data.get(name))
    return self
803,071
Caches some form details to later process and validate incoming (response) form data Args: form: form dict
def _cache_form_details(self, form):
    cache = FormCache()
    form['model']['form_key'] = cache.form_id
    form['model']['form_name'] = self.__class__.__name__
    cache.set(
        {
            # In Python 3, dictionary keys are not serializable
            'model': list(form['model'].keys()),
            'non_data_fields': self.non_data_fields
        }
    )
803,074
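The list() call in the cached payload matters: under Python 3 a dict view is not JSON serializable, as this small check shows:

import json

details = {'form_key': 'abc', 'form_name': 'LoginForm'}
json.dumps(list(details.keys()))  # fine: a plain list serializes
try:
    json.dumps(details.keys())    # dict_keys raises TypeError under Python 3
except TypeError as exc:
    print(exc)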
Sets user notification message. Args: title: Msg. title msg: Msg. text typ: Msg. type url: Additional URL (if exists) Returns: Message ID.
def set_message(self, title, msg, typ, url=None):
    return self.user.send_notification(title=title,
                                       message=msg,
                                       typ=typ,
                                       url=url)
803,101
Checks if current user (or role) has the given permission. Args: perm: Permission code or object. Depends on the :attr:`~zengine.auth.auth_backend.AuthBackend` implementation. Returns: Boolean.
def has_permission(self, perm):
    return self.user.superuser or self.auth.has_permission(perm)
803,103
Assigns current task step to self.task then updates the task's data with self.task_data Args: task: Task object.
def _update_task(self, task):
    self.task = task
    self.task.data.update(self.task_data)
    self.task_type = task.task_spec.__class__.__name__
    self.spec = task.task_spec
    self.task_name = task.get_name()
    self.activity = getattr(self.spec, 'service_class', '')
    self._set_lane_data()
803,109
GET method handler Args: req: Request object. resp: Response object.
def __init__(self, current):
    import sys
    read_existing = set(sys.PYOKO_LOGS['read']) - set(sys.PYOKO_LOGS['new'])
    current.output = {
        # format() was given two arguments but only one placeholder;
        # include both the stat counter and the existing-read set
        'response': "DB Access Stats: {} {}".format(str(sys.PYOKO_STAT_COUNTER),
                                                    str(read_existing)),
        'http_headers': (('Content-Type', 'text/plain'),),
    }
    sys.PYOKO_LOGS = {
        "save": 0,
        "update": 0,
        "read": 0,
        "count": 0,
        "search": 0,
    }
803,153
GET method handler Args: req: Request object. resp: Response object.
def __init__(self, current):
    import sys
    from pyoko.modelmeta import model_registry
    out = []
    for mdl_name in sys.PYOKO_LOGS.copy():
        try:
            mdl = model_registry.get_model(mdl_name)
        except KeyError:
            continue
        bucket_name = mdl.objects.adapter.bucket.name
        mdl.objects.adapter.bucket.set_decoder('application/json',
                                               lambda a: bytes_to_str(a))
        for k in set(sys.PYOKO_LOGS[mdl_name]):
            if k not in sys.PYOKO_LOGS['new']:
                obj = mdl.objects.data().get(k)
                print(obj)
                out.append("{}/|{}/|{}".format(bucket_name, k, obj[0]))
                # print(str(mdl.objects.get(k).name))
        sys.PYOKO_LOGS[mdl_name] = []
        mdl.objects.adapter.bucket.set_decoder('application/json',
                                               binary_json_decoder)
    sys.PYOKO_LOGS['new'] = []
    current.output = {
        'response': "\n".join(out),
        'http_headers': (('Content-Type', 'text/plain; charset=utf-8'),),
    }
803,154
Creates a menu entry for given model data. Updates results in place. Args: model_data: Model data. object_type: Relation name. results: Results dict.
def _add_crud(self, model_data, object_type, results):
    model = model_registry.get_model(model_data['name'])
    field_name = model_data.get('field')
    verbose_name = model_data.get('verbose_name', model.Meta.verbose_name_plural)
    category = model_data.get('category', settings.DEFAULT_OBJECT_CATEGORY_NAME)
    wf_dict = {"text": verbose_name,
               "wf": model_data.get('wf', "crud"),
               "model": model_data['name'],
               "kategori": category}
    if field_name:
        wf_dict['param'] = field_name
    results[object_type].append(wf_dict)
    self._add_to_quick_menu(wf_dict['model'], wf_dict)
803,196
Appends menu entries to dashboard quickmenu according to :attr:`zengine.settings.QUICK_MENU` Args: key: workflow name wf: workflow menu entry
def _add_to_quick_menu(self, key, wf):
    if key in settings.QUICK_MENU:
        self.output['quick_menu'].append(wf)
803,197
AMQP connection callback. Creates input channel. Args: connection: AMQP connection
def on_connected(self, connection):
    log.info('PikaClient: connected to RabbitMQ')
    self.connected = True
    self.in_channel = self.connection.channel(self.on_channel_open)
803,206
Input channel creation callback. Queue declaration is done here. Args: channel: input channel
def on_channel_open(self, channel):
    self.in_channel.exchange_declare(exchange='input_exc',
                                     type='topic',
                                     durable=True)
    channel.queue_declare(callback=self.on_input_queue_declare,
                          queue=self.INPUT_QUEUE_NAME)
803,207
Input queue declaration callback. Input Queue/Exchange binding done here Args: queue: input queue
def on_input_queue_declare(self, queue):
    self.in_channel.queue_bind(callback=None,
                               exchange='input_exc',
                               queue=self.INPUT_QUEUE_NAME,
                               routing_key="#")
803,208
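The three callbacks above build one topology step by step; written synchronously with a blocking connection, the same shape looks like this (queue name and connection parameters are illustrative, and pika >= 1.0 spells the argument exchange_type rather than type):

import pika

conn = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
ch = conn.channel()
# durable topic exchange, one input queue, bound on every routing key
ch.exchange_declare(exchange='input_exc', exchange_type='topic', durable=True)
ch.queue_declare(queue='in_queue')
ch.queue_bind(exchange='input_exc', queue='in_queue', routing_key='#')
conn.close()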
Overrides ProcessParser.parse_node Parses and attaches the inputOutput tags that are created by Camunda Modeller Args: node: xml task node Returns: TaskSpec
def parse_node(self, node):
    spec = super(CamundaProcessParser, self).parse_node(node)
    spec.data = self._parse_input_data(node)
    spec.data['lane_data'] = self._get_lane_properties(node)
    spec.defines = spec.data
    service_class = node.get(full_attr('assignee'))
    if service_class:
        self.parsed_nodes[node.get('id')].service_class = service_class
    return spec
803,317
Parses the inputOutput part of the Camunda Modeller extensions. Args: node: SpiffWorkflow Node object. Returns: Data dict.
def _parse_input_data(self, node):
    data = DotDict()
    try:
        for nod in self._get_input_nodes(node):
            data.update(self._parse_input_node(nod))
    except Exception as e:
        log.exception("Error while processing node: %s" % node)
    return data
803,321
Parses the given XML node Args: node (xml): XML node.

.. code-block:: xml

    <bpmn2:lane id="Lane_8" name="Lane 8">
      <bpmn2:extensionElements>
        <camunda:properties>
          <camunda:property value="foo,bar" name="perms"/>
        </camunda:properties>
      </bpmn2:extensionElements>
    </bpmn2:lane>

Returns: {'perms': 'foo,bar'}
def _get_lane_properties(self, node):
    lane_name = self.get_lane(node.get('id'))
    lane_data = {'name': lane_name}
    for a in self.xpath(".//bpmn:lane[@name='%s']/*/*/" % lane_name):
        lane_data[a.attrib['name']] = a.attrib['value'].strip()
    return lane_data
803,323
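A self-contained lxml sketch that pulls the same camunda:property name/value pairs out of the lane XML shown in the docstring; the namespace URIs are the standard BPMN 2.0 and Camunda ones:

from lxml import etree

XML = b"""<bpmn2:lane id="Lane_8" name="Lane 8"
    xmlns:bpmn2="http://www.omg.org/spec/BPMN/20100524/MODEL"
    xmlns:camunda="http://camunda.org/schema/1.0/bpmn">
  <bpmn2:extensionElements>
    <camunda:properties>
      <camunda:property value="foo,bar" name="perms"/>
    </camunda:properties>
  </bpmn2:extensionElements>
</bpmn2:lane>"""

lane = etree.fromstring(XML)
props = {p.get('name'): p.get('value')
         for p in lane.iter('{http://camunda.org/schema/1.0/bpmn}property')}
assert props == {'perms': 'foo,bar'}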
Generates wf packages from workflow diagrams. Args: workflow_name: Name of wf workflow_files: Glob pattern matching the diagram files. Returns: Workflow package contents (string)
def package_in_memory(cls, workflow_name, workflow_files):
    s = StringIO()
    p = cls(s, workflow_name, meta_data=[])
    p.add_bpmn_files_by_glob(workflow_files)
    p.create_package()
    return s.getvalue()
803,327
This is a pika basic consumer callback; it handles client inputs and runs the appropriate workflows and views. Args: ch: amqp channel method: amqp method properties: body: message body
def handle_message(self, ch, method, properties, body):
    input = {}
    headers = {}
    try:
        self.sessid = method.routing_key
        input = json_decode(body)
        data = input['data']
        # since this comes as "path" we don't know if it's view or workflow yet
        # TODO: just a workaround till we modify ui to
        if 'path' in data:
            if data['path'] in VIEW_METHODS:
                data['view'] = data['path']
            else:
                data['wf'] = data['path']
        session = Session(self.sessid)
        headers = {'remote_ip': input['_zops_remote_ip'],
                   'source': input['_zops_source']}
        if 'wf' in data:
            output = self._handle_workflow(session, data, headers)
        elif 'job' in data:
            self._handle_job(session, data, headers)
            return
        else:
            output = self._handle_view(session, data, headers)
    except HTTPError as e:
        import sys
        if hasattr(sys, '_called_from_test'):
            raise
        output = {"cmd": "error", "error": self._prepare_error_msg(e.message),
                  "code": e.code}
        log.exception("Http error occurred")
    except:
        self.current = Current(session=session, input=data)
        self.current.headers = headers
        import sys
        if hasattr(sys, '_called_from_test'):
            raise
        err = traceback.format_exc()
        output = {"cmd": "error", "error": self._prepare_error_msg(err), "code": 500}
        log.exception("Worker error occurred with message body:\n%s" % body)
    if 'callbackID' in input:
        output['callbackID'] = input['callbackID']
    log.info("OUTPUT for %s: %s" % (self.sessid, output))
    output['reply_timestamp'] = time()
    self.send_output(output)
803,378
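The path-routing rule at the top of handle_message is easy to isolate; a standalone sketch with a stand-in VIEW_METHODS registry:

VIEW_METHODS = {'ping', 'mark_offline'}  # stand-in for the real registry

def route(data):
    # a path naming a registered view dispatches as a view,
    # anything else is treated as a workflow name
    if 'path' in data:
        key = 'view' if data['path'] in VIEW_METHODS else 'wf'
        data[key] = data['path']
    return data

assert route({'path': 'ping'}) == {'path': 'ping', 'view': 'ping'}
assert route({'path': 'crud'}) == {'path': 'crud', 'wf': 'crud'}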
If the xml content has been updated, creates a new entry for the given wf name Args: name: name of wf content: xml content Returns (DiagramXML, bool): A tuple with two members: the DiagramXML instance, and True if it's new or False if it already exists
def get_or_create_by_content(cls, name, content):
    new = False
    diagrams = cls.objects.filter(name=name)
    if diagrams:
        diagram = diagrams[0]
        if diagram.body != content:
            new = True
    else:
        new = True
    if new:
        diagram = cls(name=name, body=content).save()
    return diagram, new
803,389
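The create-only-on-change rule is independent of the object store; a standalone sketch with a plain dict (all names illustrative):

store = {}

def get_or_create_by_content(name, content):
    # a new entry is recorded only when the content actually changed
    if store.get(name) == content:
        return content, False
    store[name] = content
    return content, True

assert get_or_create_by_content('login_wf', '<xml v1/>') == ('<xml v1/>', True)
assert get_or_create_by_content('login_wf', '<xml v1/>') == ('<xml v1/>', False)
assert get_or_create_by_content('login_wf', '<xml v2/>') == ('<xml v2/>', True)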
Updates the xml link if there aren't any running instances of this wf Args: diagram: XMLDiagram object
def set_xml(self, diagram, force=False):
    no_of_running = WFInstance.objects.filter(wf=self,
                                              finished=False,
                                              started=True).count()
    if no_of_running and not force:
        raise RunningInstancesExist(
            "Can't update WF diagram! Running %s WF instances exist for %s" % (
                no_of_running, self.name
            ))
    else:
        self.xml = diagram
        parser = BPMNParser(diagram.body)
        self.description = parser.get_description()
        self.title = parser.get_name() or self.name.replace('_', ' ').title()
        extensions = dict(parser.get_wf_extensions())
        self.programmable = extensions.get('programmable', False)
        self.task_type = extensions.get('task_type', None)
        self.menu_category = extensions.get('menu_category',
                                            settings.DEFAULT_WF_CATEGORY_NAME)
        self.save()
803,394