docstring stringlengths 52 499 | function stringlengths 67 35.2k | __index_level_0__ int64 52.6k 1.16M |
|---|---|---|
Corresponds to IDD Field `relative_humidity`
Args:
value (int): value for IDD Field `relative_humidity`
value >= 0
value <= 110
Missing value: 999
if `value` is None it will not be checked against the
specification and ... | def relative_humidity(self, value=999):
if value is not None:
try:
value = int(value)
except ValueError:
raise ValueError('value {} need to be of type int '
'for field `relative_humidity`'.format(value))
... | 797,527 |
Corresponds to IDD Field `atmospheric_station_pressure`
Args:
value (int): value for IDD Field `atmospheric_station_pressure`
Unit: Pa
value > 31000
value < 120000
Missing value: 999999
if `value` is None it will not be... | def atmospheric_station_pressure(self, value=999999):
if value is not None:
try:
value = int(value)
except ValueError:
raise ValueError(
'value {} need to be of type int '
'for field `atmospheric_station_pre... | 797,528 |
Corresponds to IDD Field `extraterrestrial_horizontal_radiation`
Args:
value (float): value for IDD Field `extraterrestrial_horizontal_radiation`
Unit: Wh/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be checked ... | def extraterrestrial_horizontal_radiation(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `extraterre... | 797,529 |
Corresponds to IDD Field `extraterrestrial_direct_normal_radiation`
Args:
value (float): value for IDD Field `extraterrestrial_direct_normal_radiation`
Unit: Wh/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be ch... | def extraterrestrial_direct_normal_radiation(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `extrate... | 797,530 |
Corresponds to IDD Field `horizontal_infrared_radiation_intensity`
Args:
value (float): value for IDD Field `horizontal_infrared_radiation_intensity`
Unit: Wh/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be chec... | def horizontal_infrared_radiation_intensity(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `horizont... | 797,531 |
Corresponds to IDD Field `global_horizontal_radiation`
Args:
value (float): value for IDD Field `global_horizontal_radiation`
Unit: Wh/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be checked against the
... | def global_horizontal_radiation(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `global_horizontal_ra... | 797,532 |
Corresponds to IDD Field `direct_normal_radiation`
Args:
value (float): value for IDD Field `direct_normal_radiation`
Unit: Wh/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be checked against the
... | def direct_normal_radiation(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `direct_normal_radiation`... | 797,533 |
Corresponds to IDD Field `diffuse_horizontal_radiation`
Args:
value (float): value for IDD Field `diffuse_horizontal_radiation`
Unit: Wh/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be checked against the
... | def diffuse_horizontal_radiation(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `diffuse_horizontal_... | 797,534 |
Corresponds to IDD Field `global_horizontal_illuminance`
will be missing if >= 999900
Args:
value (float): value for IDD Field `global_horizontal_illuminance`
Unit: lux
value >= 0.0
Missing value: 999999.0
if `value` is None it... | def global_horizontal_illuminance(self, value=999999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `global_horizonta... | 797,535 |
Corresponds to IDD Field `direct_normal_illuminance`
will be missing if >= 999900
Args:
value (float): value for IDD Field `direct_normal_illuminance`
Unit: lux
value >= 0.0
Missing value: 999999.0
if `value` is None it will no... | def direct_normal_illuminance(self, value=999999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `direct_normal_illumi... | 797,536 |
Corresponds to IDD Field `diffuse_horizontal_illuminance`
will be missing if >= 999900
Args:
value (float): value for IDD Field `diffuse_horizontal_illuminance`
Unit: lux
value >= 0.0
Missing value: 999999.0
if `value` is None ... | def diffuse_horizontal_illuminance(self, value=999999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `diffuse_horizon... | 797,537 |
Corresponds to IDD Field `zenith_luminance`
will be missing if >= 9999
Args:
value (float): value for IDD Field `zenith_luminance`
Unit: Cd/m2
value >= 0.0
Missing value: 9999.0
if `value` is None it will not be checked against... | def zenith_luminance(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `zenith_luminance`'.format(value))
... | 797,538 |
Corresponds to IDD Field `wind_direction`
Args:
value (float): value for IDD Field `wind_direction`
Unit: degrees
value >= 0.0
value <= 360.0
Missing value: 999.0
if `value` is None it will not be checked against the
... | def wind_direction(self, value=999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `wind_direction`'.format(value))
... | 797,539 |
Corresponds to IDD Field `wind_speed`
Args:
value (float): value for IDD Field `wind_speed`
Unit: m/s
value >= 0.0
value <= 40.0
Missing value: 999.0
if `value` is None it will not be checked against the
... | def wind_speed(self, value=999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `wind_speed`'.format(value))
if val... | 797,540 |
Corresponds to IDD Field `visibility` This is the value for
visibility in km. (Horizontal visibility at the time indicated.)
Args:
value (float): value for IDD Field `visibility`
Unit: km
Missing value: 9999.0
if `value` is None it will not be... | def visibility(self, value=9999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `visibility`'.format(value))
self._vi... | 797,543 |
Corresponds to IDD Field `present_weather_codes`
Args:
value (int): value for IDD Field `present_weather_codes`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value... | def present_weather_codes(self, value=None):
if value is not None:
try:
value = int(value)
except ValueError:
raise ValueError(
'value {} need to be of type int '
'for field `present_weather_codes`'.format(v... | 797,546 |
Corresponds to IDD Field `precipitable_water`
Args:
value (float): value for IDD Field `precipitable_water`
Unit: mm
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assumed to be a missin... | def precipitable_water(self, value=999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `precipitable_water`'.format(va... | 797,547 |
Corresponds to IDD Field `aerosol_optical_depth`
Args:
value (float): value for IDD Field `aerosol_optical_depth`
Unit: thousandths
Missing value: 0.999
if `value` is None it will not be checked against the
specification and is assumed... | def aerosol_optical_depth(self, value=0.999):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `aerosol_optical_depth`'.for... | 797,548 |
Corresponds to IDD Field `snow_depth`
Args:
value (float): value for IDD Field `snow_depth`
Unit: cm
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
... | def snow_depth(self, value=999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `snow_depth`'.format(value))
self._sno... | 797,549 |
Corresponds to IDD Field `days_since_last_snowfall`
Args:
value (int): value for IDD Field `days_since_last_snowfall`
Missing value: 99
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
R... | def days_since_last_snowfall(self, value=99):
if value is not None:
try:
value = int(value)
except ValueError:
raise ValueError(
'value {} need to be of type int '
'for field `days_since_last_snowfall`'.form... | 797,550 |
Corresponds to IDD Field `albedo`
Args:
value (float): value for IDD Field `albedo`
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: ... | def albedo(self, value=999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError('value {} need to be of type float '
'for field `albedo`'.format(value))
self._albedo = va... | 797,551 |
Corresponds to IDD Field `liquid_precipitation_depth`
Args:
value (float): value for IDD Field `liquid_precipitation_depth`
Unit: mm
Missing value: 999.0
if `value` is None it will not be checked against the
specification and is assume... | def liquid_precipitation_depth(self, value=999.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `liquid_precipitation_d... | 797,552 |
Corresponds to IDD Field `liquid_precipitation_quantity`
Args:
value (float): value for IDD Field `liquid_precipitation_quantity`
Unit: hr
Missing value: 99.0
if `value` is None it will not be checked against the
specification and is a... | def liquid_precipitation_quantity(self, value=99.0):
if value is not None:
try:
value = float(value)
except ValueError:
raise ValueError(
'value {} need to be of type float '
'for field `liquid_precipitation... | 797,553 |
Exports object to its string representation.
Args:
top (bool): if True appends `internal_name` before values.
All non list objects should be exported with value top=True,
all list objects, that are embedded in as fields inlist objects
should be expor... | def export(self, top=True):
out = []
if top:
out.append(self._internal_name)
out.append(self._to_str(self.year))
out.append(self._to_str(self.month))
out.append(self._to_str(self.day))
out.append(self._to_str(self.hour))
out.append(self._to_st... | 797,554 |
Appends weather data.
Args:
def add_weatherdata(self, data):
    """Append a weather data record to this container.

    Args:
        data (WeatherData): weather data object to append.

    Raises:
        ValueError: if `data` is not a WeatherData instance.
    """
    if isinstance(data, WeatherData):
        self._data["WEATHER DATA"].append(data)
    else:
        raise ValueError('Weather data need to be of type WeatherData')
Save WeatherData in EPW format to path.
Args:
path (str): path where EPW file should be saved | def save(self, path, check=True):
with open(path, 'w') as f:
if check:
if ("LOCATION" not in self._data or
self._data["LOCATION"] is None):
raise ValueError('location is not valid.')
if ("DESIGN CONDITIONS" not in s... | 797,557 |
Creates an object depending on `internal_name`
Args:
internal_name (str): IDD name
Raises:
ValueError: if `internal_name` cannot be matched to a data dictionary object | def _create_datadict(cls, internal_name):
if internal_name == "LOCATION":
return Location()
if internal_name == "DESIGN CONDITIONS":
return DesignConditions()
if internal_name == "TYPICAL/EXTREME PERIODS":
return TypicalOrExtremePeriods()
if i... | 797,558 |
Read EPW weather data from path.
Args:
path (str): path to read weather data from | def read(self, path):
with open(path, "r") as f:
for line in f:
line = line.strip()
match_obj_name = re.search(r"^([A-Z][A-Z/ \d]+),", line)
if match_obj_name is not None:
internal_name = match_obj_name.group(1)
... | 797,559 |
Read values
Args:
vals (list): list of strings representing values | def read(self, vals):
i = 0
{%- for field in fields %}
{%- if field.is_list %}
count = int(vals[i])
i += 1
for _ in range(count):
obj = {{field.object_name}}()
obj.read(vals[i:i + obj.field_count])
self.add_{{field.field_name}}... | 797,648 |
Repeatedly call fold and merge on data and then finalize.
Arguments:
data: Input for the fold function.
reducer: The IReducer to use.
chunk_size: How many items should be passed to fold at a time?
Returns:
Return value of finalize. | def reduce(reducer, data, chunk_size=DEFAULT_CHUNK_SIZE):
if not chunk_size:
return finalize(reducer, fold(reducer, data))
# Splitting the work up into chunks allows us to, e.g. reduce a large file
# without loading everything into memory, while still being significantly
# faster than repe... | 797,872 |
Match grammar function 'f' against next token and set 'self.matched'.
Arguments:
f: A grammar function - see efilter.parsers.common.grammar. Must
return TokenMatch or None.
args: Passed to 'f', if any.
Returns:
Instance of efilter.parsers.common.gram... | def match(self, f, *args):
try:
match = f(self.tokenizer, *args)
except StopIteration:
# The grammar function might have tried to access more tokens than
# are available. That's not really an error, it just means it didn't
# match.
ret... | 797,992 |
Helper: solve 'expr' always returning a scalar (not IRepeated).
If the output of 'expr' is a single value or a single RowTuple with a single
column then return the value in that column. Otherwise raise.
Arguments:
expr: Expression to solve.
vars: The scope.
Returns:
A scalar v... | def __solve_for_scalar(expr, vars):
var = solve(expr, vars).value
try:
scalar = repeated.getvalue(var)
except TypeError:
raise errors.EfilterTypeError(
root=expr, query=expr.source,
message="Wasn't expecting more than one value here. Got %r."
% (var,)... | 798,005 |
An expression is an atom or an infix expression.
Grammar (sort of, actually a precedence-climbing parser):
expression = atom [ binary_operator expression ] .
Args:
def expression(self, previous_precedence=0):
    """Parse an expression: an atom optionally followed by infix operators.

    Precedence-climbing parser; grammar is roughly:
    expression = atom [ binary_operator expression ].

    Args:
        previous_precedence: operator precedence to start climbing from.
    """
    return self.operator(self.atom(), previous_precedence)
Automatically generate implementations for a type.
Implement the protocol for the 'for_type' type by dispatching each
member function of the protocol to an instance method of the same name
declared on the type 'for_type'.
Arguments:
for_type: The type to implictly implement... | def implicit_static(cls, for_type=None, for_types=None):
for type_ in cls.__get_type_args(for_type, for_types):
implementations = {}
for function in cls.required():
method = getattr(type_, function.__name__, None)
if not callable(method):
... | 798,222 |
Parse one of the rules as either objectfilter or dottysql.
Example:
_parse_query("5 + 5")
# Returns Sum(Literal(5), Literal(5))
Arguments:
source: A rule in either objectfilter or dottysql syntax.
Returns:
def _parse_query(self, source):
    """Parse one of the rules as either objectfilter or dottysql.

    Rules containing objectfilter-specific keywords are parsed with the
    objectfilter syntax; everything else uses the default dialect.

    Example:
        _parse_query("5 + 5")
        # Returns Sum(Literal(5), Literal(5))

    Arguments:
        source: A rule in either objectfilter or dottysql syntax.

    Returns:
        The AST representing the rule.
    """
    dialect = "objectfilter" if self.OBJECTFILTER_WORDS.search(source) else None
    return query.Query(source, syntax=dialect)
Create a syntax parser for this dialect.
Arguments:
original: The source code of this query. Most often this is a
string type, but there are exceptions (e.g. lisp)
params: Some dialects support parametric queries (for safety) -
def __init__(self, original, params=None):
    """Create a syntax parser for this dialect.

    Arguments:
        original: The source code of this query. Most often a string,
            but there are exceptions (e.g. lisp).
        params: Optional parameters for parametric queries (for safety).
    """
    super(Syntax, self).__init__()
    self.original = original
    self.params = params
Generates a version string.
Arguments:
dev_version: Generate a verbose development version from git commits.
Examples:
1.1
def get_version(dev_version=False):
    """Generate a version string.

    Arguments:
        dev_version: Generate a verbose development version from git commits.

    Examples:
        1.1
        1.1.dev43  # If 'dev_version' was passed.
    """
    if not dev_version:
        return "1!%d.%d" % (MAJOR, MINOR)
    version = git_dev_version()
    if not version:
        raise RuntimeError("Could not generate dev version from git.")
    return version
Registers an implementing function for for_type.
Arguments:
implementation: Callable implementation for this type.
for_type: The type this implementation applies to.
for_types: Same as for_type, but takes a tuple of types.
for_type and for_types cannot both be p... | def implement(self, implementation, for_type=None, for_types=None):
unbound_implementation = self.__get_unbound_function(implementation)
for_types = self.__get_types(for_type, for_types)
for t in for_types:
self._write_lock.acquire()
try:
self.im... | 798,326 |
Add the pattern to a screen.
Also fills self.widgets.
Args:
screen_width (int): the width of the screen
screen (lcdprod.Screen): the screen to fill. | def add_to_screen(self, screen_width, screen):
for lineno, fields in enumerate(self.line_fields):
for left, field in self.compute_positions(screen_width, fields):
logger.debug(
"Adding field %s to screen %s at x=%d->%d, y=%d",
field, s... | 798,969 |
Add a pattern to the list.
Args:
pattern_txt (str list): the pattern, as a list of lines. | def add(self, pattern_txt):
self.patterns[len(pattern_txt)] = pattern_txt
low = 0
high = len(pattern_txt) - 1
while not pattern_txt[low]:
low += 1
while not pattern_txt[high]:
high -= 1
min_pattern = pattern_txt[low:high + 1]
s... | 798,974 |
Retrieve the best pattern for a given size.
The algorithm is:
- If a pattern is registered for the size, use it
- Otherwise, find the longest registered pattern shorter thant size, add
some blank lines before, and return it
- If no shorter pattern exist, return a blank patte... | def __getitem__(self, key):
if key in self.patterns:
return ScreenPattern(self.patterns[key], self.field_registry)
for shorter in range(key, 0, -1):
if shorter in self.min_patterns:
pattern = self.min_patterns[shorter]
# Try to vertically... | 798,975 |
Convert a '[user[:pass]@]host:port' string to a Connection tuple.
If the given connection is empty, use defaults.
If no port is given, use the default.
Args:
conn (str): the string describing the target hsot/port
default_host (str): the host to use if ``conn`` is empty
default_port... | def _make_hostport(conn, default_host, default_port, default_user='', default_password=None):
parsed = urllib.parse.urlparse('//%s' % conn)
return Connection(
parsed.hostname or default_host,
parsed.port or default_port,
parsed.username if parsed.username is not None else default_us... | 799,122 |
Create a ScreenPatternList from a given pattern text.
Args:
pattern_txt (str list): the patterns
Returns:
mpdlcd.display_pattern.ScreenPatternList: a list of patterns from the
def _make_patterns(patterns):
    """Build a ScreenPatternList from the given pattern texts.

    Args:
        patterns (str list): the pattern texts, one entry per pattern.

    Returns:
        mpdlcd.display_pattern.ScreenPatternList: a list of patterns
        built from the given entries.
    """
    registry = display_fields.FieldRegistry()
    pattern_list = display_pattern.ScreenPatternList(field_registry=registry)
    for pattern_text in patterns:
        pattern_list.add(pattern_text.split('\n'))
    return pattern_list
Extract options values from a configparser, optparse pair.
Options given on command line take precedence over options read in the
configuration file.
Args:
config (dict): option values read from a config file through
configparser
options (optparse.Options): optparse 'options' o... | def _extract_options(config, options, *args):
extract = {}
for key in args:
if key not in args:
continue
extract[key] = config[key]
option = getattr(options, key, None)
if option is not None:
extract[key] = option
return extract | 799,129 |
Receive an answer from the daemon and return the response.
Args:
socket (socket.socket): A socket that is connected to the daemon.
Returns:
def receive_data(socket):
    """Receive a pickled answer from the daemon and return it.

    Accumulates packets until the peer closes the connection, then
    unpickles the whole payload. The socket is always closed, even if
    unpickling fails.

    Args:
        socket (socket.socket): A socket that is connected to the daemon.

    Returns:
        dict or str: The unpickled answer.
    """
    # NOTE: the parameter shadows the stdlib `socket` module; the name is
    # kept for backward compatibility with existing callers.
    chunks = []
    try:
        while True:
            packet = socket.recv(4096)
            if not packet:
                break
            chunks.append(packet)
        # Join once instead of quadratic bytes concatenation.
        # SECURITY: pickle on socket data is only safe because this is
        # trusted local IPC with the user's own daemon.
        return pickle.loads(b"".join(chunks))
    finally:
        # Close even on unpickling errors (the original leaked the socket).
        socket.close()
Connect to a daemon's socket.
Args:
root_dir (str): The directory that used as root by the daemon.
Returns:
socket.socket: A socket that is connected to the daemon. | def connect_socket(root_dir):
# Get config directory where the daemon socket is located
config_dir = os.path.join(root_dir, '.config/pueue')
# Create Socket and exit with 1, if socket can't be created
try:
client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
socket_path = os.... | 800,721 |
Create a socket for the daemon, depending on the directory location.
Args:
config_dir (str): The absolute path to the config directory used by the daemon.
Returns:
socket.socket: The daemon socket. Clients connect to this socket. | def create_socket(self):
socket_path = os.path.join(self.config_dir, 'pueue.sock')
# Create Socket and exit with 1, if socket can't be created
try:
if os.path.exists(socket_path):
os.remove(socket_path)
self.socket = socket.socket(socket.AF_UNIX, ... | 801,186 |
Add a new command to the daemon queue.
Args:
args['command'] (list(str)): The actual programm call. Something like ['ls', '-a'] or ['ls -al']
root_dir (string): The path to the root directory the daemon is running in. | def execute_add(args, root_dir=None):
# We accept a list of strings.
# This is done to create a better commandline experience with argparse.
command = ' '.join(args['command'])
# Send new instruction to daemon
instruction = {
'command': command,
'path': os.getcwd()
}
p... | 801,230 |
Edit a existing queue command in the daemon.
Args:
args['key'] int: The key of the queue entry to be edited
root_dir (string): The path to the root directory the daemon is running in. | def execute_edit(args, root_dir=None):
# Get editor
EDITOR = os.environ.get('EDITOR', 'vim')
# Get command from server
key = args['key']
status = command_factory('status')({}, root_dir=root_dir)
# Check if queue is not empty, the entry exists and is queued or stashed
if not isinstance(... | 801,231 |
A factory which returns functions for direct daemon communication.
This factory will create a function which sends a payload to the daemon
and returns the unpickled object which is returned by the daemon.
Args:
command (string): The type of payload this should be. This determines
as wh... | def command_factory(command):
def communicate(body={}, root_dir=None):
client = connect_socket(root_dir)
body['mode'] = command
# Delete the func entry we use to call the correct function with argparse
# as functions can't be pickled and this shouldn't be send to the d... | 801,232 |
Print the status of the daemon.
This function displays the current status of the daemon as well
as the whole queue and all available information about every entry
in the queue.
`terminaltables` is used to format and display the queue contents.
`colorclass` is used to color format the various items ... | def execute_status(args, root_dir=None):
status = command_factory('status')({}, root_dir=root_dir)
# First rows, showing daemon status
if status['status'] == 'running':
status['status'] = Color('{autogreen}' + '{}'.format(status['status']) + '{/autogreen}')
elif status['status'] in ['paused... | 801,314 |
Print the current log file.
Args:
args['keys'] (int): If given, we only look at the specified processes.
root_dir (string): The path to the root directory the daemon is running in. | def execute_log(args, root_dir):
# Print the logs of all specified processes
if args.get('keys'):
config_dir = os.path.join(root_dir, '.config/pueue')
queue_path = os.path.join(config_dir, 'queue')
if os.path.exists(queue_path):
queue_file = open(queue_path, 'rb')
... | 801,315 |
Print stderr and stdout of the current running process.
Args:
args['watch'] (bool): If True, we open a curses session and tail
the output live in the console.
root_dir (string): The path to the root directory the daemon is running in. | def execute_show(args, root_dir):
key = None
if args.get('key'):
key = args['key']
status = command_factory('status')({}, root_dir=root_dir)
if key not in status['data'] or status['data'][key]['status'] != 'running':
print('No running process with this key, use `log` to ... | 801,316 |
Generate output key material based on an `info` value
Arguments:
- info - context to generate the OKM
- length - length in bytes of the key to generate
def expand(self, info=b"", length=32):
    """Generate output key material (OKM) from the stored PRK.

    Delegates to the module-level `hkdf_expand` with this object's
    pseudorandom key and hash function.

    Arguments:
        info: context bytes used to bind the generated key material.
        length: length in bytes of the key to generate.

    See the HKDF draft RFC for guidance.
    """
    return hkdf_expand(self._prk, info, length, self._hash)
Return the server's IP address.
Params:
- addr_family: IPv4, IPv6 or None. None prefers IPv4 but will
return IPv6 if IPv4 addr was not available.
- access: 'public' or 'private' | def get_ip(self, access='public', addr_family=None, strict=None):
if addr_family not in ['IPv4', 'IPv6', None]:
raise Exception("`addr_family` must be 'IPv4', 'IPv6' or None")
if access not in ['private', 'public']:
raise Exception("`access` must be 'public' or 'private... | 801,438 |
Convet an epoch date to a tuple in format ("yyyy-mm-dd","hh:mm:ss")
Example: "1023456427" -> ("2002-06-07","15:27:07")
Parameters:
- `timestamp`: date in epoch format | def epochdate(timestamp):
dt = datetime.fromtimestamp(float(timestamp)).timetuple()
fecha = "{0:d}-{1:02d}-{2:02d}".format(dt.tm_year, dt.tm_mon, dt.tm_mday)
hora = "{0:02d}:{1:02d}:{2:02d}".format(dt.tm_hour, dt.tm_min, dt.tm_sec)
return (fecha, hora) | 802,180 |
Add given value to item (list)
Args:
val: A JSON serializable object.
Returns:
def add(self, val):
    """Push the given value onto this item (a list).

    Args:
        val: A JSON serializable object.

    Returns:
        Cache backend response.
    """
    payload = json.dumps(val) if self.serialize else val
    return cache.lpush(self.key, payload)
Removes given item from the list.
Args:
val: Item
Returns:
def remove_item(self, val):
    """Remove the given item from the list.

    Args:
        val: Item to remove (encoded the same way `add` stored it).

    Returns:
        Cache backend response.
    """
    # Consistency fix: mirror add(), which only JSON-encodes when
    # self.serialize is set. Unconditional json.dumps meant values stored
    # unserialized could never be matched and removed.
    payload = json.dumps(val) if self.serialize else val
    return cache.lrem(self.key, payload)
Removes all keys of this namespace
Without args, clears all keys starting with cls.PREFIX
if called with args, clears keys starting with given cls.PREFIX + args
Args:
*args: Arbitrary number of arguments.
Returns:
def flush(cls, *args):
    """Remove all keys of this namespace.

    Without args, clears all keys starting with cls.PREFIX; when called
    with args, clears keys starting with cls.PREFIX + args.

    Args:
        *args: Arbitrary number of key-suffix arguments.

    Returns:
        List of removed keys.
    """
    prefix = cls._make_key(args) if args else cls.PREFIX
    return _remove_keys([], [prefix + '*'])
Sends a message to possible owners of the current workflows
next lane.
Args:
**kwargs: ``current`` and ``possible_owners`` are required.
sender (User): User object | def send_message_for_lane_change(sender, **kwargs):
current = kwargs['current']
owners = kwargs['possible_owners']
if 'lane_change_invite' in current.task_data:
msg_context = current.task_data.pop('lane_change_invite')
else:
msg_context = DEFAULT_LANE_CHANGE_INVITE_MSG
wfi = WF... | 802,540 |
While splitting channel and moving chosen subscribers to new channel,
old channel's messages are copied and moved to new channel.
Args:
from_channel (Channel object): move messages from channel
to_channel (Channel object): move messages to channel | def copy_and_move_messages(from_channel, to_channel):
with BlockSave(Message, query_dict={'channel_id': to_channel.key}):
for message in Message.objects.filter(channel=from_channel, typ=15):
message.key = ''
message.channel = to_channel
messag... | 802,552 |
It shows incorrect operations or successful operation messages.
Args:
title (string): title of message box
def show_warning_messages(self, title=_(u"Incorrect Operation"), box_type='warning'):
    """Show an operation result message box to the user.

    Consumes `msg` from the current task data and renders it in a
    message box on the output payload.

    Args:
        title (string): title of message box.
        box_type (string): type of message box (warning, info).
    """
    # pop() combines the original read-then-delete of task_data['msg'].
    message = self.current.task_data.pop('msg')
    self.current.output['msgbox'] = {'type': box_type, "title": title, "msg": message}
It returns chosen keys list from a given form.
Args:
form_info: serialized list of dict form data
Returns:
selected_keys(list): Chosen keys list
selected_names(list): Chosen channels' or subscribers' names. | def return_selected_form_items(form_info):
selected_keys = []
selected_names = []
for chosen in form_info:
if chosen['choice']:
selected_keys.append(chosen['key'])
selected_names.append(chosen['name'])
return selected_keys, selected_n... | 802,554 |
It controls the selection from the form according
to the operations, and returns an error message
if it does not comply with the rules.
Args:
form_info: Channel or subscriber form from the user
Returns: True or False
error message | def selection_error_control(self, form_info):
keys, names = self.return_selected_form_items(form_info['ChannelList'])
chosen_channels_number = len(keys)
if form_info['new_channel'] and chosen_channels_number < 2:
return False, _(
u"You should choose at least... | 802,555 |
Writes user data to session.
Args:
user: User object | def set_user(self, user):
self.session['user_id'] = user.key
self.session['user_data'] = user.clean_value()
role = self.get_role()
# TODO: this should be remembered from previous login
# self.session['role_data'] = default_role.clean_value()
self.session['role_id... | 802,596 |
Initializes the workflow with given request, response objects and diagram name.
Args:
session:
input:
workflow_name (str): Name of workflow diagram without ".bpmn" suffix.
File must be placed under one of configured :py:attr:`~zengine.settings.WORKFLOW_PACKAGES_... | def start_engine(self, **kwargs):
self.current = WFCurrent(**kwargs)
self.wf_state = {'in_external': False, 'finished': False}
if not self.current.new_token:
self.wf_state = self.current.wf_cache.get(self.wf_state)
self.current.workflow_name = self.wf_state['name... | 802,697 |
Imports the module that contains the referenced method.
Args:
path: python path of class/function
look_for_cls_method (bool): If True, treat the last part of path as class method.
Returns:
Tuple. (class object, class name, method to be called) | def _import_object(self, path, look_for_cls_method):
last_nth = 2 if look_for_cls_method else 1
path = path.split('.')
module_path = '.'.join(path[:-last_nth])
class_name = path[-last_nth]
module = importlib.import_module(module_path)
if look_for_cls_method and p... | 802,710 |
Creates some aliases for attributes of ``current``.
Args:
current: :attr:`~zengine.engine.WFCurrent` object. | def set_current(self, current):
self.current = current
self.input = current.input
# self.req = current.request
# self.resp = current.response
self.output = current.output
self.cmd = current.task_data['cmd']
if self.cmd and NEXT_CMD_SPLITTER in self.cmd:
... | 802,744 |
Renders form. Applies form modifiers, then writes
result to response payload. If supplied, given form
object instance will be used instead of view's
default ObjectForm.
Args:
_form (:py:attr:`~zengine.forms.json_form.JsonForm`):
Form object to override `self.o... | def form_out(self, _form=None):
_form = _form or self.object_form
self.output['forms'] = _form.serialize()
self._add_meta_props(_form)
self.output['forms']['grouping'] = _form.Meta.grouping
self.output['forms']['constraints'] = _form.Meta.constraints
self._patch_... | 802,746 |
Adds given cmd(s) to ``self.output['client_cmd']``
Args:
*args: Client commands. | def set_client_cmd(self, *args):
self.client_cmd.update(args)
self.output['client_cmd'] = list(self.client_cmd) | 802,747 |
load xml from given path
Args:
path: diagram path
Returns: | def get_wf_from_path(self, path):
with open(path) as fp:
content = fp.read()
return [(os.path.basename(os.path.splitext(path)[0]), content), ] | 802,774 |
Handles pagination of object listings.
Args:
current_page int:
Current page number
query_set (:class:`QuerySet<pyoko:pyoko.db.queryset.QuerySet>`):
Object listing queryset.
per_page int:
Objects per page.
Returns:
QuerySet object, pagination ... | def _paginate(self, current_page, query_set, per_page=10):
total_objects = query_set.count()
total_pages = int(total_objects / per_page or 1)
# add orphans to last page
current_per_page = per_page + (
total_objects % per_page if current_page == total_pages else 0)
pagination_data = dict... | 802,789 |
Creates a direct messaging channel between two user
Args:
initiator: User, who want's to make first contact
receiver: User, other party
Returns:
(Channel, receiver_name) | def get_or_create_direct_channel(cls, initiator_key, receiver_key):
existing = cls.objects.OR().filter(
code_name='%s_%s' % (initiator_key, receiver_key)).filter(
code_name='%s_%s' % (receiver_key, initiator_key))
receiver_name = UserModel.objects.get(receiver_key).full_... | 802,815 |
Serializes message for given user.
Note:
Should be called before first save(). Otherwise "is_update" will get wrong value.
Args:
user: User object
Returns:
Dict. JSON serialization ready dictionary object | def serialize(self, user=None):
return {
'content': self.body,
'type': self.typ,
'updated_at': self.updated_at,
'timestamp': self.updated_at,
'is_update': not hasattr(self, 'unsaved'),
'attachments': [attachment.serialize() for att... | 802,833 |
Kullanıcı şifresini encrypt ederek set eder.
Args:
raw_password (str) | def set_password(self, raw_password):
self.password = pbkdf2_sha512.encrypt(raw_password, rounds=10000,
salt_size=10) | 802,894 |
sends message to users private mq exchange
Args:
title:
message:
sender:
url:
typ: | def send_notification(self, title, message, typ=1, url=None, sender=None):
self.created_channels.channel.add_message(
channel_key=self.prv_exchange,
body=message,
title=title,
typ=typ,
url=url,
sender=sender,
receiver=s... | 802,900 |
Send arbitrary cmd and data to client
if queue name passed by "via_queue" parameter,
that queue will be used instead of users private exchange.
Args:
data: dict
cmd: string
via_queue: queue name, | def send_client_cmd(self, data, cmd=None, via_queue=None):
mq_channel = self._connect_mq()
if cmd:
data['cmd'] = cmd
if via_queue:
mq_channel.basic_publish(exchange='',
routing_key=via_queue,
... | 802,901 |
Soyut Role Permission nesnesi tanımlamayı sağlar.
Args:
perm (object): | def add_permission(self, perm):
self.Permissions(permission=perm)
PermissionCache.flush()
self.save() | 802,983 |
Adds a permission with given name.
Args:
code (str): Code name of the permission.
save (bool): If False, does nothing. | def add_permission_by_name(self, code, save=False):
if not save:
return ["%s | %s" % (p.name, p.code) for p in
Permission.objects.filter(code__contains=code)]
for p in Permission.objects.filter(code__contains=code):
if p not in self.Permissions:
... | 802,985 |
Disconnect receiver from sender for signal.
If weak references are used, disconnect need not be called. The receiver
will be remove from dispatch automatically.
Arguments:
receiver
The registered receiver to disconnect. May be none if
dispatch_uid i... | def disconnect(self, receiver=None, sender=None, dispatch_uid=None):
if dispatch_uid:
lookup_key = (dispatch_uid, _make_id(sender))
else:
lookup_key = (_make_id(receiver), _make_id(sender))
disconnected = False
with self.lock:
self._clear_de... | 802,996 |
Prepare links of form by mimicing pyoko's get_links method's result
Args:
**kw:
Returns: list of link dicts | def get_links(self, **kw):
links = [a for a in dir(self) if isinstance(getattr(self, a), Model)
and not a.startswith('_model')]
return [
{
'field': l,
'mdl': getattr(self, l).__class__,
} for l in links
] | 803,068 |
Fills form with data
Args:
data (dict): Data to assign form fields.
Returns:
Self. Form object. | def set_data(self, data):
for name in self._fields:
setattr(self, name, data.get(name))
return self | 803,071 |
Caches some form details to lates process and validate incoming (response) form data
Args:
form: form dict | def _cache_form_details(self, form):
cache = FormCache()
form['model']['form_key'] = cache.form_id
form['model']['form_name'] = self.__class__.__name__
cache.set(
{
'model': list(form['model'].keys()), # In Python 3, dictionary keys are not serializa... | 803,074 |
Sets user notification message.
Args:
title: Msg. title
msg: Msg. text
typ: Msg. type
url: Additional URL (if exists)
Returns:
Message ID. | def set_message(self, title, msg, typ, url=None):
return self.user.send_notification(title=title,
message=msg,
typ=typ,
url=url) | 803,101 |
Checks if current user (or role) has the given permission.
Args:
perm: Permmission code or object.
Depends on the :attr:`~zengine.auth.auth_backend.AuthBackend` implementation.
Returns:
Boolean. | def has_permission(self, perm):
return self.user.superuser or self.auth.has_permission(perm) | 803,103 |
Assigns current task step to self.task
then updates the task's data with self.task_data
Args:
task: Task object. | def _update_task(self, task):
self.task = task
self.task.data.update(self.task_data)
self.task_type = task.task_spec.__class__.__name__
self.spec = task.task_spec
self.task_name = task.get_name()
self.activity = getattr(self.spec, 'service_class', '')
sel... | 803,109 |
GET method handler
Args:
req: Request object.
resp: Response object. | def __init__(self, current):
import sys
read_existing = set(sys.PYOKO_LOGS['read']) - set(sys.PYOKO_LOGS['new'])
current.output = {
'response': "DB Access Stats: {}".format(str(sys.PYOKO_STAT_COUNTER),
str(read_existing)... | 803,153 |
GET method handler
Args:
req: Request object.
resp: Response object. | def __init__(self, current):
import sys
from pyoko.modelmeta import model_registry
out = []
for mdl_name in sys.PYOKO_LOGS.copy():
try:
mdl = model_registry.get_model(mdl_name)
except KeyError:
continue
bucket_n... | 803,154 |
Creates a menu entry for given model data.
Updates results in place.
Args:
model_data: Model data.
object_type: Relation name.
results: Results dict. | def _add_crud(self, model_data, object_type, results):
model = model_registry.get_model(model_data['name'])
field_name = model_data.get('field')
verbose_name = model_data.get('verbose_name', model.Meta.verbose_name_plural)
category = model_data.get('category', settings.DEFAULT_O... | 803,196 |
Appends menu entries to dashboard quickmenu according
to :attr:`zengine.settings.QUICK_MENU`
Args:
key: workflow name
wf: workflow menu entry | def _add_to_quick_menu(self, key, wf):
if key in settings.QUICK_MENU:
self.output['quick_menu'].append(wf) | 803,197 |
AMQP connection callback.
Creates input channel.
Args:
connection: AMQP connection | def on_connected(self, connection):
log.info('PikaClient: connected to RabbitMQ')
self.connected = True
self.in_channel = self.connection.channel(self.on_channel_open) | 803,206 |
Input channel creation callback
Queue declaration done here
Args:
channel: input channel | def on_channel_open(self, channel):
self.in_channel.exchange_declare(exchange='input_exc', type='topic', durable=True)
channel.queue_declare(callback=self.on_input_queue_declare, queue=self.INPUT_QUEUE_NAME) | 803,207 |
Input queue declaration callback.
Input Queue/Exchange binding done here
Args:
queue: input queue | def on_input_queue_declare(self, queue):
self.in_channel.queue_bind(callback=None,
exchange='input_exc',
queue=self.INPUT_QUEUE_NAME,
routing_key="#") | 803,208 |
Overrides ProcessParser.parse_node
Parses and attaches the inputOutput tags that created by Camunda Modeller
Args:
node: xml task node
Returns:
TaskSpec | def parse_node(self, node):
spec = super(CamundaProcessParser, self).parse_node(node)
spec.data = self._parse_input_data(node)
spec.data['lane_data'] = self._get_lane_properties(node)
spec.defines = spec.data
service_class = node.get(full_attr('assignee'))
if ser... | 803,317 |
Parses inputOutput part camunda modeller extensions.
Args:
node: SpiffWorkflow Node object.
Returns:
Data dict. | def _parse_input_data(self, node):
data = DotDict()
try:
for nod in self._get_input_nodes(node):
data.update(self._parse_input_node(nod))
except Exception as e:
log.exception("Error while processing node: %s" % node)
return data | 803,321 |
Parses the given XML node
Args:
node (xml): XML node.
.. code-block:: xml
<bpmn2:lane id="Lane_8" name="Lane 8">
<bpmn2:extensionElements>
<camunda:properties>
<camunda:property value="foo,bar" name="perms"/>
... | def _get_lane_properties(self, node):
lane_name = self.get_lane(node.get('id'))
lane_data = {'name': lane_name}
for a in self.xpath(".//bpmn:lane[@name='%s']/*/*/" % lane_name):
lane_data[a.attrib['name']] = a.attrib['value'].strip()
return lane_data | 803,323 |
Generates wf packages from workflow diagrams.
Args:
workflow_name: Name of wf
workflow_files: Diagram file.
Returns:
Workflow package (file like) object | def package_in_memory(cls, workflow_name, workflow_files):
s = StringIO()
p = cls(s, workflow_name, meta_data=[])
p.add_bpmn_files_by_glob(workflow_files)
p.create_package()
return s.getvalue() | 803,327 |
this is a pika.basic_consumer callback
handles client inputs, runs appropriate workflows and views
Args:
ch: amqp channel
method: amqp method
properties:
body: message body | def handle_message(self, ch, method, properties, body):
input = {}
headers = {}
try:
self.sessid = method.routing_key
input = json_decode(body)
data = input['data']
# since this comes as "path" we dont know if it's view or workflow yet
... | 803,378 |
if xml content updated, create a new entry for given wf name
Args:
name: name of wf
content: xml content
Returns (DiagramXML(), bool): A tuple with two members.
(DiagramXML instance and True if it's new or False it's already exists | def get_or_create_by_content(cls, name, content):
new = False
diagrams = cls.objects.filter(name=name)
if diagrams:
diagram = diagrams[0]
if diagram.body != content:
new = True
else:
new = True
if new:
diagr... | 803,389 |
updates xml link if there aren't any running instances of this wf
Args:
diagram: XMLDiagram object | def set_xml(self, diagram, force=False):
no_of_running = WFInstance.objects.filter(wf=self, finished=False, started=True).count()
if no_of_running and not force:
raise RunningInstancesExist(
"Can't update WF diagram! Running %s WF instances exists for %s" % (
... | 803,394 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.