code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1
value |
|---|---|---|---|---|---|
"""Put an OpenAI Gym environment into the TensorFlow graph."""
import gym
import tensorflow as tf
class InGraphEnv(object):
    """Put an OpenAI Gym environment into the TensorFlow graph.

    The environment will be stepped and reset inside of the graph using
    tf.py_func(). The current observation, action, reward, and done flag are held
    in according variables.
    """

    def __init__(self, env):
        """Put an OpenAI Gym environment into the TensorFlow graph.

        Args:
          env: OpenAI Gym environment.
        """
        self._env = env
        # Mirror the Gym spaces as TensorFlow shapes/dtypes so the state
        # variables below are compatible with what the environment emits.
        observ_shape = self._parse_shape(self._env.observation_space)
        observ_dtype = self._parse_dtype(self._env.observation_space)
        action_shape = self._parse_shape(self._env.action_space)
        action_dtype = self._parse_dtype(self._env.action_space)
        with tf.name_scope('environment'):
            # Non-trainable variables hold the latest transition so other graph
            # ops can read it after simulate()/reset() have run.
            self._observ = tf.Variable(tf.zeros(observ_shape, observ_dtype),
                                       name='observ',
                                       trainable=False)
            self._action = tf.Variable(tf.zeros(action_shape, action_dtype),
                                       name='action',
                                       trainable=False)
            self._reward = tf.Variable(0.0, dtype=tf.float32, name='reward', trainable=False)
            # 'done' starts True so a reset is required before the first step.
            self._done = tf.Variable(True, dtype=tf.bool, name='done', trainable=False)
            self._step = tf.Variable(0, dtype=tf.int32, name='step', trainable=False)

    def __getattr__(self, name):
        """Forward unimplemented attributes to the original environment.

        Args:
          name: Attribute that was accessed.

        Returns:
          Value behind the attribute name in the wrapped environment.
        """
        return getattr(self._env, name)

    def simulate(self, action):
        """Step the environment.

        The result of the step can be accessed from the variables defined below.

        Args:
          action: Tensor holding the action to apply.

        Returns:
          Operation.
        """
        with tf.name_scope('environment/simulate'):
            if action.dtype in (tf.float16, tf.float32, tf.float64):
                action = tf.check_numerics(action, 'action')
            observ_dtype = self._parse_dtype(self._env.observation_space)
            # The Python environment is invoked through py_func; only the first
            # three elements of the Gym step tuple (observ, reward, done) are used.
            observ, reward, done = tf.py_func(lambda a: self._env.step(a)[:3], [action],
                                              [observ_dtype, tf.float32, tf.bool],
                                              name='step')
            observ = tf.check_numerics(observ, 'observ')
            reward = tf.check_numerics(reward, 'reward')
            return tf.group(self._observ.assign(observ), self._action.assign(action),
                            self._reward.assign(reward), self._done.assign(done),
                            self._step.assign_add(1))

    def reset(self):
        """Reset the environment.

        Returns:
          Tensor of the current observation.
        """
        observ_dtype = self._parse_dtype(self._env.observation_space)
        observ = tf.py_func(self._env.reset, [], observ_dtype, name='reset')
        observ = tf.check_numerics(observ, 'observ')
        # Reward and done are cleared together with storing the new observation.
        with tf.control_dependencies(
                [self._observ.assign(observ),
                 self._reward.assign(0),
                 self._done.assign(False)]):
            return tf.identity(observ)

    @property
    def observ(self):
        """Access the variable holding the current observation."""
        return self._observ

    @property
    def action(self):
        """Access the variable holding the last received action."""
        return self._action

    @property
    def reward(self):
        """Access the variable holding the current reward."""
        return self._reward

    @property
    def done(self):
        """Access the variable indicating whether the episode is done."""
        return self._done

    @property
    def step(self):
        """Access the variable containing total steps of this environment."""
        return self._step

    def _parse_shape(self, space):
        """Get a tensor shape from a OpenAI Gym space.

        Args:
          space: Gym space.

        Returns:
          Shape tuple.
        """
        if isinstance(space, gym.spaces.Discrete):
            return ()
        if isinstance(space, gym.spaces.Box):
            return space.shape
        raise NotImplementedError()

    def _parse_dtype(self, space):
        """Get a tensor dtype from a OpenAI Gym space.

        Args:
          space: Gym space.

        Returns:
          TensorFlow data type.
        """
        if isinstance(space, gym.spaces.Discrete):
            return tf.int32
        if isinstance(space, gym.spaces.Box):
            return tf.float32
        raise NotImplementedError()
"""Wrappers for OpenAI Gym environments."""
import atexit
import functools
import multiprocessing
import sys
import traceback
import gym
import gym.spaces
import numpy as np
import tensorflow as tf
class AutoReset(object):
    """Automatically reset environment when the episode is done."""

    def __init__(self, env):
        self._env = env
        self._done = True

    def __getattr__(self, name):
        return getattr(self._env, name)

    def step(self, action):
        # A finished episode means the incoming action opens a new episode:
        # reset instead of stepping and report a neutral transition.
        if not self._done:
            transition = self._env.step(action)
        else:
            transition = (self._env.reset(), 0.0, False, {})
        observ, reward, done, info = transition
        self._done = done
        return observ, reward, done, info

    def reset(self):
        self._done = False
        return self._env.reset()
class ActionRepeat(object):
    """Repeat the agent action multiple steps."""

    def __init__(self, env, amount):
        self._env = env
        self._amount = amount

    def __getattr__(self, name):
        return getattr(self._env, name)

    def step(self, action):
        # Apply the same action up to `amount` times, accumulating the reward
        # and stopping early if the episode ends mid-repeat.
        total_reward = 0
        for _ in range(self._amount):
            observ, reward, done, info = self._env.step(action)
            total_reward += reward
            if done:
                break
        return observ, total_reward, done, info
class RandomStart(object):
    """Perform random number of random actions at the start of the episode."""

    def __init__(self, env, max_steps):
        self._env = env
        self._max_steps = max_steps

    def __getattr__(self, name):
        return getattr(self._env, name)

    def reset(self):
        # Reset, then take a random number of randomly sampled actions; if the
        # warm-up terminates the episode, start over from scratch.
        observ = self._env.reset()
        for _ in range(np.random.randint(0, self._max_steps)):
            action = self._env.action_space.sample()
            observ, _, done, _ = self._env.step(action)
            if done:
                tf.logging.warning('Episode ended during random start.')
                return self.reset()
        return observ
class FrameHistory(object):
    """Augment the observation with past observations."""

    def __init__(self, env, past_indices, flatten):
        """Augment the observation with past observations.

        Implemented as a Numpy ring buffer holding the necessary past observations.

        Args:
          env: OpenAI Gym environment to wrap.
          past_indices: List of non-negative integers indicating the time offsets
            from the current time step of observations to include.
          flatten: Concatenate the past observations rather than stacking them.

        Raises:
          KeyError: The current observation is not included in the indices.
        """
        if 0 not in past_indices:
            raise KeyError('Past indices should include 0 for the current frame.')
        self._env = env
        self._past_indices = past_indices
        self._step = 0
        self._buffer = None
        # The buffer must hold max(past_indices) + 1 frames. With only
        # max(past_indices) slots, the oldest requested offset would wrap
        # around the ring and alias the current frame ((step - max) % max ==
        # step % max), and past_indices == [0] would yield a zero-size buffer
        # and a modulo-by-zero crash.
        self._capacity = max(past_indices) + 1
        self._flatten = flatten

    def __getattr__(self, name):
        """Forward unimplemented attributes to the original environment."""
        return getattr(self._env, name)

    @property
    def observation_space(self):
        """Stack the wrapped bounds once per requested frame."""
        low = self._env.observation_space.low
        high = self._env.observation_space.high
        low = np.repeat(low[None, ...], len(self._past_indices), 0)
        high = np.repeat(high[None, ...], len(self._past_indices), 0)
        if self._flatten:
            low = np.reshape(low, (-1,) + low.shape[2:])
            high = np.reshape(high, (-1,) + high.shape[2:])
        return gym.spaces.Box(low, high)

    def step(self, action):
        """Step the environment and return the selected frame stack."""
        observ, reward, done, info = self._env.step(action)
        self._step += 1
        self._buffer[self._step % self._capacity] = observ
        observ = self._select_frames()
        return observ, reward, done, info

    def reset(self):
        """Reset the environment and pre-fill the buffer with the first frame."""
        observ = self._env.reset()
        # Fill the whole ring buffer with the initial frame so selections made
        # before enough real history exists still return valid observations.
        self._buffer = np.repeat(observ[None, ...], self._capacity, 0)
        self._step = 0
        return self._select_frames()

    def _select_frames(self):
        """Gather the requested past frames from the ring buffer."""
        indices = [(self._step - index) % self._capacity for index in self._past_indices]
        observ = self._buffer[indices]
        if self._flatten:
            observ = np.reshape(observ, (-1,) + observ.shape[2:])
        return observ
class FrameDelta(object):
    """Convert the observation to a difference from the previous observation."""

    def __init__(self, env):
        self._env = env
        self._last = None

    def __getattr__(self, name):
        return getattr(self._env, name)

    @property
    def observation_space(self):
        # A delta between two frames can span [-(high-low), high-low].
        space = self._env.observation_space
        span = space.high - space.low
        return gym.spaces.Box(-span, span)

    def step(self, action):
        observ, reward, done, info = self._env.step(action)
        delta, self._last = observ - self._last, observ
        return delta, reward, done, info

    def reset(self):
        # The first frame after a reset is returned as-is, not as a delta.
        observ = self._env.reset()
        self._last = observ
        return observ
class RangeNormalize(object):
    """Normalize the specialized observation and action ranges to [-1, 1]."""

    def __init__(self, env, observ=None, action=None):
        # `observ`/`action` are tri-state: True forces normalization (error if
        # the range is infinite), False disables it, None means "if possible".
        self._env = env
        observ_finite = self._is_finite(env.observation_space)
        action_finite = self._is_finite(env.action_space)
        self._should_normalize_observ = observ is not False and observ_finite
        self._should_normalize_action = action is not False and action_finite
        if observ is True and not self._should_normalize_observ:
            raise ValueError('Cannot normalize infinite observation range.')
        if observ is None and not self._should_normalize_observ:
            tf.logging.info('Not normalizing infinite observation range.')
        if action is True and not self._should_normalize_action:
            raise ValueError('Cannot normalize infinite action range.')
        if action is None and not self._should_normalize_action:
            tf.logging.info('Not normalizing infinite action range.')

    def __getattr__(self, name):
        return getattr(self._env, name)

    @property
    def observation_space(self):
        space = self._env.observation_space
        if self._should_normalize_observ:
            ones = np.ones(space.shape)
            return gym.spaces.Box(-ones, ones)
        return space

    @property
    def action_space(self):
        space = self._env.action_space
        if self._should_normalize_action:
            ones = np.ones(space.shape)
            return gym.spaces.Box(-ones, ones)
        return space

    def step(self, action):
        if self._should_normalize_action:
            action = self._denormalize_action(action)
        observ, reward, done, info = self._env.step(action)
        if self._should_normalize_observ:
            observ = self._normalize_observ(observ)
        return observ, reward, done, info

    def reset(self):
        observ = self._env.reset()
        if self._should_normalize_observ:
            observ = self._normalize_observ(observ)
        return observ

    def _denormalize_action(self, action):
        # Map [-1, 1] back into the environment's native action range.
        space = self._env.action_space
        return (action + 1) / 2 * (space.high - space.low) + space.low

    def _normalize_observ(self, observ):
        # Map the native observation range into [-1, 1].
        space = self._env.observation_space
        return 2 * (observ - space.low) / (space.high - space.low) - 1

    def _is_finite(self, space):
        return np.isfinite(space.low).all() and np.isfinite(space.high).all()
class ClipAction(object):
    """Clip out of range actions to the action space of the environment."""

    def __init__(self, env):
        self._env = env

    def __getattr__(self, name):
        return getattr(self._env, name)

    @property
    def action_space(self):
        # Advertise an unbounded space; actual bounds are enforced in step().
        shape = self._env.action_space.shape
        inf = np.inf * np.ones(shape)
        return gym.spaces.Box(-inf, inf)

    def step(self, action):
        space = self._env.action_space
        clipped = np.clip(action, space.low, space.high)
        return self._env.step(clipped)
class LimitDuration(object):
    """End episodes after specified number of steps."""

    def __init__(self, env, duration):
        self._env = env
        self._duration = duration
        self._step = None

    def __getattr__(self, name):
        return getattr(self._env, name)

    def step(self, action):
        # A None counter means no episode is active.
        if self._step is None:
            raise RuntimeError('Must reset environment.')
        observ, reward, done, info = self._env.step(action)
        self._step += 1
        if self._step < self._duration:
            return observ, reward, done, info
        # Limit reached: force the done flag and require a reset.
        self._step = None
        return observ, reward, True, info

    def reset(self):
        self._step = 0
        return self._env.reset()
class ExternalProcess(object):
    """Step environment in a separate process for lock free parallelism."""

    # Message types for communication via the pipe.
    _ACTION = 1
    _RESET = 2
    _CLOSE = 3
    _ATTRIBUTE = 4
    _TRANSITION = 5
    _OBSERV = 6
    _EXCEPTION = 7
    _VALUE = 8

    def __init__(self, constructor):
        """Step environment in a separate process for lock free parallelism.

        The environment will be created in the external process by calling the
        specified callable. This can be an environment class, or a function
        creating the environment and potentially wrapping it. The returned
        environment should not access global variables.

        Args:
          constructor: Callable that creates and returns an OpenAI gym environment.

        Attributes:
          observation_space: The cached observation space of the environment.
          action_space: The cached action space of the environment.
        """
        self._conn, conn = multiprocessing.Pipe()
        self._process = multiprocessing.Process(target=self._worker, args=(constructor, conn))
        # Make sure the worker is shut down even if the parent exits abnormally.
        atexit.register(self.close)
        self._process.start()
        self._observ_space = None
        self._action_space = None

    @property
    def observation_space(self):
        # Fetched from the worker once on first access, then cached.
        if not self._observ_space:
            self._observ_space = self.__getattr__('observation_space')
        return self._observ_space

    @property
    def action_space(self):
        if not self._action_space:
            self._action_space = self.__getattr__('action_space')
        return self._action_space

    def __getattr__(self, name):
        """Request an attribute from the environment.

        Note that this involves communication with the external process, so it can
        be slow.

        Args:
          name: Attribute to access.

        Returns:
          Value of the attribute.
        """
        self._conn.send((self._ATTRIBUTE, name))
        return self._receive(self._VALUE)

    def step(self, action, blocking=True):
        """Step the environment.

        Args:
          action: The action to apply to the environment.
          blocking: Whether to wait for the result.

        Returns:
          Transition tuple when blocking, otherwise callable that returns the
          transition tuple.
        """
        self._conn.send((self._ACTION, action))
        if blocking:
            return self._receive(self._TRANSITION)
        else:
            return functools.partial(self._receive, self._TRANSITION)

    def reset(self, blocking=True):
        """Reset the environment.

        Args:
          blocking: Whether to wait for the result.

        Returns:
          New observation when blocking, otherwise callable that returns the new
          observation.
        """
        self._conn.send((self._RESET, None))
        if blocking:
            return self._receive(self._OBSERV)
        else:
            return functools.partial(self._receive, self._OBSERV)

    def close(self):
        """Send a close message to the external process and join it."""
        try:
            self._conn.send((self._CLOSE, None))
            self._conn.close()
        except IOError:
            # The connection was already closed.
            pass
        self._process.join()

    def _receive(self, expected_message):
        """Wait for a message from the worker process and return its payload.

        Args:
          expected_message: Type of the expected message.

        Raises:
          Exception: An exception was raised inside the worker process.
          KeyError: The received message is not of the expected type.

        Returns:
          Payload object of the message.
        """
        message, payload = self._conn.recv()
        # Re-raise exceptions in the main process.
        if message == self._EXCEPTION:
            stacktrace = payload
            raise Exception(stacktrace)
        if message == expected_message:
            return payload
        raise KeyError('Received message of unexpected type {}'.format(message))

    def _worker(self, constructor, conn):
        """The process waits for actions and sends back environment results.

        Args:
          constructor: Constructor for the OpenAI Gym environment.
          conn: Connection for communication to the main process.
        """
        try:
            env = constructor()
            while True:
                try:
                    # Only block for short times to have keyboard exceptions be raised.
                    if not conn.poll(0.1):
                        continue
                    message, payload = conn.recv()
                except (EOFError, KeyboardInterrupt):
                    break
                if message == self._ACTION:
                    action = payload
                    conn.send((self._TRANSITION, env.step(action)))
                    continue
                if message == self._RESET:
                    assert payload is None
                    conn.send((self._OBSERV, env.reset()))
                    continue
                if message == self._ATTRIBUTE:
                    name = payload
                    conn.send((self._VALUE, getattr(env, name)))
                    continue
                if message == self._CLOSE:
                    assert payload is None
                    break
                raise KeyError('Received message of unknown type {}'.format(message))
        except Exception:  # pylint: disable=broad-except
            # Forward the full traceback to the parent so it can re-raise it.
            stacktrace = ''.join(traceback.format_exception(*sys.exc_info()))
            conn.send((self._EXCEPTION, stacktrace))
            tf.compat.v1.logging.error('Error in environment process: {}'.format(stacktrace))
        conn.close()
class ConvertTo32Bit(object):
    """Convert data types of an OpenAI Gym environment to 32 bit."""

    # Mapping from wide Numpy dtypes to their 32-bit counterparts.
    _NARROWING = {np.dtype(np.float64): np.float32, np.dtype(np.int64): np.int32}

    def __init__(self, env):
        """Convert data types of an OpenAI Gym environment to 32 bit.

        Args:
          env: OpenAI Gym environment.
        """
        self._env = env

    def __getattr__(self, name):
        """Forward unimplemented attributes to the original environment.

        Args:
          name: Attribute that was accessed.

        Returns:
          Value behind the attribute name in the wrapped environment.
        """
        return getattr(self._env, name)

    def step(self, action):
        """Forward action to the wrapped environment and narrow the results.

        Args:
          action: Action to apply to the environment.

        Returns:
          Converted observation, converted reward, done flag, and info object.
        """
        observ, reward, done, info = self._env.step(action)
        return self._convert_observ(observ), self._convert_reward(reward), done, info

    def reset(self):
        """Reset the environment and convert the resulting observation.

        Returns:
          Converted observation.
        """
        return self._convert_observ(self._env.reset())

    def _convert_observ(self, observ):
        """Narrow a finite observation to a 32-bit dtype.

        Args:
          observ: Numpy observation.

        Raises:
          ValueError: Observation contains infinite values.

        Returns:
          Numpy observation with 32-bit data type.
        """
        if not np.isfinite(observ).all():
            raise ValueError('Infinite observation encountered.')
        target = self._NARROWING.get(observ.dtype)
        return observ.astype(target) if target is not None else observ

    def _convert_reward(self, reward):
        """Return the reward as a finite 32-bit float array.

        Args:
          reward: Numpy reward.

        Raises:
          ValueError: Rewards contain infinite values.

        Returns:
          Numpy reward with 32-bit data type.
        """
        if not np.isfinite(reward).all():
            raise ValueError('Infinite reward encountered.')
        return np.array(reward, dtype=np.float32)
"""In-graph simulation step of a vecrotized algorithm with environments."""
import tensorflow as tf
from . import streaming_mean
def simulate(batch_env, algo, log=True, reset=False):
    """Simulation step of a vectorized algorithm with in-graph environments.

    Integrates the operations implemented by the algorithm and the environments
    into a combined operation.

    Args:
      batch_env: In-graph batch environment.
      algo: Algorithm instance implementing required operations.
      log: Tensor indicating whether to compute and return summaries.
      reset: Tensor causing all environments to reset.

    Returns:
      Tuple of tensors containing done flags for the current episodes, possibly
      intermediate scores for the episodes, and a summary tensor.
    """

    def _define_begin_episode(agent_indices):
        """Reset environments, intermediate scores and durations for new episodes.

        Args:
          agent_indices: Tensor containing batch indices starting an episode.

        Returns:
          Summary tensor.
        """
        assert agent_indices.shape.ndims == 1
        zero_scores = tf.zeros_like(agent_indices, tf.float32)
        zero_durations = tf.zeros_like(agent_indices)
        reset_ops = [
            batch_env.reset(agent_indices),
            tf.compat.v1.scatter_update(score, agent_indices, zero_scores),
            tf.compat.v1.scatter_update(length, agent_indices, zero_durations)
        ]
        with tf.control_dependencies(reset_ops):
            return algo.begin_episode(agent_indices)

    def _define_step():
        """Request actions from the algorithm and apply them to the environments.

        Increments the lengths of all episodes and increases their scores by the
        current reward. After stepping the environments, provides the full
        transition tuple to the algorithm.

        Returns:
          Summary tensor.
        """
        prevob = batch_env.observ + 0  # Ensure a copy of the variable value.
        action, step_summary = algo.perform(prevob)
        action.set_shape(batch_env.action.shape)
        with tf.control_dependencies([batch_env.simulate(action)]):
            add_score = score.assign_add(batch_env.reward)
            inc_length = length.assign_add(tf.ones(len(batch_env), tf.int32))
        with tf.control_dependencies([add_score, inc_length]):
            experience_summary = algo.experience(prevob, batch_env.action, batch_env.reward,
                                                 batch_env.done, batch_env.observ)
        return tf.compat.v1.summary.merge([step_summary, experience_summary])

    def _define_end_episode(agent_indices):
        """Notify the algorithm of ending episodes.

        Also updates the mean score and length counters used for summaries.

        Args:
          agent_indices: Tensor holding batch indices that end their episodes.

        Returns:
          Summary tensor.
        """
        assert agent_indices.shape.ndims == 1
        submit_score = mean_score.submit(tf.gather(score, agent_indices))
        submit_length = mean_length.submit(tf.cast(tf.gather(length, agent_indices), tf.float32))
        with tf.control_dependencies([submit_score, submit_length]):
            return algo.end_episode(agent_indices)

    def _define_summaries():
        """Reset the average score and duration, and return them as summary.

        Returns:
          Summary string.
        """
        # `str` serves as the false_fn of tf.cond: calling it yields '', which
        # acts as an empty summary.
        score_summary = tf.cond(tf.logical_and(log, tf.cast(
            mean_score.count, tf.bool)), lambda: tf.compat.v1.summary.scalar('mean_score', mean_score.clear()),
            str)
        length_summary = tf.cond(tf.logical_and(
            log, tf.cast(mean_length.count,
                         tf.bool)), lambda: tf.compat.v1.summary.scalar('mean_length', mean_length.clear()), str)
        return tf.compat.v1.summary.merge([score_summary, length_summary])

    with tf.name_scope('simulate'):
        log = tf.convert_to_tensor(log)
        reset = tf.convert_to_tensor(reset)
        with tf.compat.v1.variable_scope('simulate_temporary'):
            # Per-environment running score and episode length.
            score = tf.Variable(tf.zeros(len(batch_env), dtype=tf.float32), False, name='score')
            length = tf.Variable(tf.zeros(len(batch_env), dtype=tf.int32), False, name='length')
        mean_score = streaming_mean.StreamingMean((), tf.float32)
        mean_length = streaming_mean.StreamingMean((), tf.float32)
        # Environments that begin a new episode: all of them when `reset` is
        # set, otherwise those whose previous episode just finished.
        agent_indices = tf.cond(reset, lambda: tf.range(len(batch_env)), lambda: tf.cast(
            tf.where(batch_env.done)[:, 0], tf.int32))
        begin_episode = tf.cond(tf.cast(tf.shape(agent_indices)[0],
                                        tf.bool), lambda: _define_begin_episode(agent_indices), str)
        with tf.control_dependencies([begin_episode]):
            step = _define_step()
        with tf.control_dependencies([step]):
            agent_indices = tf.cast(tf.where(batch_env.done)[:, 0], tf.int32)
            end_episode = tf.cond(tf.cast(tf.shape(agent_indices)[0],
                                          tf.bool), lambda: _define_end_episode(agent_indices), str)
        with tf.control_dependencies([end_episode]):
            summary = tf.compat.v1.summary.merge([_define_summaries(), begin_episode, step, end_episode])
        # Read the outputs only after all side effects above have run.
        with tf.control_dependencies([summary]):
            done, score = tf.identity(batch_env.done), tf.identity(score)
        return done, score, summary
import math
import random
from gym import spaces
import numpy as np
from rex_gym.model.gait_planner import GaitPlanner
from rex_gym.util import pybullet_data
from .. import rex_gym_env
from ...model import rex_constants
from ...model.kinematics import Kinematics
from ...model.rex import Rex
NUM_LEGS = 4  # Quadruped leg count.
STEP_PERIOD = 1.0 / 10.0  # 10 steps per second.
class RexTurnEnv(rex_gym_env.RexGymEnv):
"""The gym environment for the rex.
It simulates the locomotion of a rex, a quadruped robot. The state space
include the angles, velocities and torques for all the motors and the action
space is the desired motor angle for each motor. The reward function is based
on how far the rex walks in 1000 steps and penalizes the energy expenditure."""
metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 66}
load_ui = True
is_terminating = False
    def __init__(self,
                 debug=False,
                 urdf_version=None,
                 control_time_step=0.005,
                 action_repeat=5,
                 control_latency=0,
                 pd_latency=0,
                 on_rack=False,
                 motor_kp=1.0,
                 motor_kd=0.02,
                 render=False,
                 num_steps_to_log=1000,
                 env_randomizer=None,
                 log_path=None,
                 target_orient=None,
                 init_orient=None,
                 signal_type="ik",
                 terrain_type="plane",
                 terrain_id=None,
                 mark='base'):
        """Initialize the rex alternating legs gym environment.

        Args:
          debug: Whether to enable debug output and interactive UI sliders.
          urdf_version: [DEFAULT_URDF_VERSION, DERPY_V0_URDF_VERSION] are allowable
            versions. If None, DEFAULT_URDF_VERSION is used. Refer to
            rex_gym_env for more details.
          control_time_step: The time step between two successive control signals.
          action_repeat: The number of simulation steps that an action is repeated.
          control_latency: The latency between get_observation() and the actual
            observation. See rex.py for more details.
          pd_latency: The latency used to get motor angles/velocities used to
            compute PD controllers. See rex.py for more details.
          on_rack: Whether to place the rex on rack. This is only used to debug
            the walk gait. In this mode, the rex's base is hung midair so
            that its walk gait is clearer to visualize.
          motor_kp: The P gain of the motor.
          motor_kd: The D gain of the motor.
          render: Whether to render the simulation.
          num_steps_to_log: The max number of control steps in one episode. If the
            number of steps is over num_steps_to_log, the environment will still
            be running, but only first num_steps_to_log will be recorded in logging.
          env_randomizer: An instance (or a list) of EnvRanzomier(s) that can
            randomize the environment during when env.reset() is called and add
            perturbations when env.step() is called.
          log_path: The path to write out logs. For the details of logging, refer to
            rex_logging.proto.
          target_orient: Target yaw in radians; randomized per episode if None.
          init_orient: Initial yaw in radians; randomized per episode if None.
          signal_type: 'ik' for the inverse-kinematics gait, 'ol' for open loop.
          terrain_type: Terrain to load; forwarded to the parent environment.
          terrain_id: Terrain variant identifier; forwarded to the parent.
          mark: Robot model selector; forwarded to the parent environment.
        """
        super(RexTurnEnv, self).__init__(
            debug=debug,
            urdf_version=urdf_version,
            accurate_motor_model_enabled=True,
            motor_overheat_protection=True,
            hard_reset=False,
            motor_kp=motor_kp,
            motor_kd=motor_kd,
            control_latency=control_latency,
            pd_latency=pd_latency,
            on_rack=on_rack,
            render=render,
            num_steps_to_log=num_steps_to_log,
            env_randomizer=env_randomizer,
            log_path=log_path,
            control_time_step=control_time_step,
            action_repeat=action_repeat,
            target_orient=target_orient,
            signal_type=signal_type,
            init_orient=init_orient,
            terrain_id=terrain_id,
            terrain_type=terrain_type,
            mark=mark)
        # Action bound and dimensionality per signal type: both controllers
        # take two scalar corrections to the gait parameters.
        action_max = {
            'ik': 0.01,
            'ol': 0.01
        }
        action_dim_map = {
            'ik': 2,
            'ol': 2
        }
        action_dim = action_dim_map[self._signal_type]
        action_high = np.array([action_max[self._signal_type]] * action_dim)
        self.action_space = spaces.Box(-action_high, action_high)
        self._cam_dist = 1.1
        self._cam_yaw = 30
        self._cam_pitch = -30
        # NOTE(review): the parent __init__ presumably already stores
        # signal_type (it is read as self._signal_type above) — confirm.
        self._signal_type = signal_type
        # We need an alternating gait, so use the "walk" planner.
        self._gait_planner = GaitPlanner("walk")
        self._kinematics = Kinematics()
        self._target_orient = target_orient
        self._init_orient = init_orient
        self._random_orient_target = False
        self._random_orient_start = False
        self._cube = None
        self.goal_reached = False
        self._stay_still = False
        self.is_terminating = False
        if self._on_rack:
            # Fixed-base debug mode: level the camera.
            self._cam_pitch = 0
    def reset(self):
        """Reset the simulation and sample start/target orientations.

        Returns:
          The first observation of the new episode.
        """
        self.goal_reached = False
        self.is_terminating = False
        self._stay_still = False
        self.init_pose = rex_constants.INIT_POSES["stand"]
        if self._signal_type == 'ol':
            self.init_pose = rex_constants.INIT_POSES["stand_ol"]
        super(RexTurnEnv, self).reset(initial_motor_angles=self.init_pose, reset_duration=0.5)
        # Once a target has been randomized, keep re-randomizing on every
        # subsequent reset; the same applies to the start orientation below.
        if not self._target_orient or self._random_orient_target:
            self._target_orient = random.uniform(0.2, 6)
            self._random_orient_target = True
        if self._on_rack:
            # on rack debug simulation
            self._init_orient = 2.1
            position = self.rex.init_on_rack_position
        else:
            position = self.rex.init_position
        if self._init_orient is None or self._random_orient_start:
            self._init_orient = random.uniform(0.2, 6)
            self._random_orient_start = True
        self.clockwise = self._solve_direction()
        if self._is_debug:
            print(f"Start Orientation: {self._init_orient}, Target Orientation: {self._target_orient}")
            print("Turning right") if self.clockwise else print("Turning left")
        if self._is_render and self._signal_type == 'ik':
            if self.load_ui:
                self.setup_ui()
                self.load_ui = False
        # Visual marker for the target orientation.
        self._load_cube(self._target_orient)
        # Place the robot at the sampled initial yaw.
        q = self.pybullet_client.getQuaternionFromEuler([0, 0, self._init_orient])
        self.pybullet_client.resetBasePositionAndOrientation(self.rex.quadruped, position, q)
        return self._get_observation()
    def setup_ui(self):
        """Create the pybullet debug sliders for base pose and gait parameters."""
        self.base_x_ui = self._pybullet_client.addUserDebugParameter("base_x",
                                                                     self._ranges["base_x"][0],
                                                                     self._ranges["base_x"][1],
                                                                     0.009)
        self.base_y_ui = self._pybullet_client.addUserDebugParameter("base_y",
                                                                     self._ranges["base_y"][0],
                                                                     self._ranges["base_y"][1],
                                                                     self._ranges["base_y"][2])
        self.base_z_ui = self._pybullet_client.addUserDebugParameter("base_z",
                                                                     self._ranges["base_z"][0],
                                                                     self._ranges["base_z"][1],
                                                                     self._ranges["base_z"][2])
        self.roll_ui = self._pybullet_client.addUserDebugParameter("roll",
                                                                   self._ranges["roll"][0],
                                                                   self._ranges["roll"][1],
                                                                   self._ranges["roll"][2])
        self.pitch_ui = self._pybullet_client.addUserDebugParameter("pitch",
                                                                    self._ranges["pitch"][0],
                                                                    self._ranges["pitch"][1],
                                                                    self._ranges["pitch"][2])
        self.yaw_ui = self._pybullet_client.addUserDebugParameter("yaw",
                                                                  self._ranges["yaw"][0],
                                                                  self._ranges["yaw"][1],
                                                                  self._ranges["yaw"][2])
        # Gait parameters use fixed slider ranges rather than self._ranges.
        self.step_length_ui = self._pybullet_client.addUserDebugParameter("step_length", -0.7, 0.7, 0.02)
        self.step_rotation_ui = self._pybullet_client.addUserDebugParameter("step_rotation", -1.5, 1.5, 0.5)
        self.step_angle_ui = self._pybullet_client.addUserDebugParameter("step_angle", -180., 180., 0.)
        self.step_period_ui = self._pybullet_client.addUserDebugParameter("step_period", 0.2, 0.9, 0.75)
    def _read_inputs(self, base_pos_coeff, gait_stage_coeff):
        """Read the debug sliders and scale them by the stage coefficients.

        Args:
          base_pos_coeff: Ramp coefficient applied to the base pose values.
          gait_stage_coeff: Ramp coefficient applied to step length/rotation.

        Returns:
          Tuple of (position, orientation, step_length, step_rotation,
          step_angle, step_period) read from the UI.
        """
        position = np.array(
            [
                self._pybullet_client.readUserDebugParameter(self.base_x_ui),
                self._pybullet_client.readUserDebugParameter(self.base_y_ui) * base_pos_coeff,
                self._pybullet_client.readUserDebugParameter(self.base_z_ui) * base_pos_coeff
            ]
        )
        orientation = np.array(
            [
                self._pybullet_client.readUserDebugParameter(self.roll_ui) * base_pos_coeff,
                self._pybullet_client.readUserDebugParameter(self.pitch_ui) * base_pos_coeff,
                self._pybullet_client.readUserDebugParameter(self.yaw_ui) * base_pos_coeff
            ]
        )
        step_length = self._pybullet_client.readUserDebugParameter(self.step_length_ui) * gait_stage_coeff
        step_rotation = self._pybullet_client.readUserDebugParameter(self.step_rotation_ui) * gait_stage_coeff
        step_angle = self._pybullet_client.readUserDebugParameter(self.step_angle_ui)
        step_period = self._pybullet_client.readUserDebugParameter(self.step_period_ui)
        return position, orientation, step_length, step_rotation, step_angle, step_period
def _signal(self, t, action):
if self._signal_type == 'ik':
return self._IK_signal(t, action)
if self._signal_type == 'ol':
return self._open_loop_signal(t, action)
@staticmethod
def _evaluate_base_stage_coeff(current_t, end_t=0.0, width=0.001):
# sigmoid function
beta = p = width
if p - beta + end_t <= current_t <= p - (beta / 2) + end_t:
return (2 / beta ** 2) * (current_t - p + beta) ** 2
elif p - (beta/2) + end_t <= current_t <= p + end_t:
return 1 - (2 / beta ** 2) * (current_t - p) ** 2
else:
return 1
@staticmethod
def _evaluate_gait_stage_coeff(current_t, end_t=0.0):
# ramp function
p = .8
if end_t <= current_t <= p + end_t:
return current_t
else:
return 1.0
    def _IK_signal(self, t, action):
        """Compute joint angles from the inverse-kinematics gait generator.

        Args:
          t: Time since reset, in seconds.
          action: Two-element policy action perturbing step rotation and period.

        Returns:
          List of twelve joint angles ordered FL, FR, RL, RR
          (three angles per leg).
        """
        base_pos_coeff = self._evaluate_base_stage_coeff(t, width=1.5)
        gait_stage_coeff = self._evaluate_gait_stage_coeff(t)
        if self._is_render and self._is_debug:
            # In debug renders the gait parameters come from the UI sliders.
            position, orientation, step_length, step_rotation, step_angle, step_period = \
                self._read_inputs(base_pos_coeff, gait_stage_coeff)
        else:
            # Step rotation sign selects the turning direction.
            step_dir_value = -0.5 * gait_stage_coeff
            if self.clockwise:
                step_dir_value *= -1
            position = np.array([0.009,
                                 self._base_y * base_pos_coeff,
                                 self._base_z * base_pos_coeff])
            orientation = np.array([self._base_roll * base_pos_coeff,
                                    self._base_pitch * base_pos_coeff,
                                    self._base_yaw * base_pos_coeff])
            step_length = (self.step_length if self.step_length is not None else 0.02)
            step_rotation = (self.step_rotation if self.step_rotation is not None else step_dir_value) + action[0]
            step_angle = self.step_angle if self.step_angle is not None else 0.0
            step_period = (self.step_period if self.step_period is not None else 0.75) + action[1]
        if self.goal_reached:
            self._stay_still = True
        frames = self._gait_planner.loop(step_length, step_angle, step_rotation, step_period, 1.0)
        fr_angles, fl_angles, rr_angles, rl_angles, _ = self._kinematics.solve(orientation, position, frames)
        signal = [
            fl_angles[0], fl_angles[1], fl_angles[2],
            fr_angles[0], fr_angles[1], fr_angles[2],
            rl_angles[0], rl_angles[1], rl_angles[2],
            rr_angles[0], rr_angles[1], rr_angles[2]
        ]
        return signal
def _open_loop_signal(self, t, action):
    """Open-loop turning signal: alternate two mirrored leg poses.

    Args:
        t: Seconds since reset; selects which pose of the pair is active.
        action: action[0] adjusts the swing amplitude, action[1] the swipe.

    Returns:
        12-dim joint pose: the open-loop stand pose plus the gait offsets.
    """
    if self.goal_reached:
        self._stay_still = True
    initial_pose = rex_constants.INIT_POSES['stand_ol']
    period = STEP_PERIOD
    extension = 0.1
    swing = 0.03 + action[0]
    swipe = 0.05 + action[1]
    # Alternate between the two poses of the selected pair every gait period.
    ith_leg = int(t / period) % 2
    pose = {
        'left_0': np.array([swipe, extension, -swing,
                            -swipe, extension, swing,
                            swipe, -extension, swing,
                            -swipe, -extension, -swing]),
        'left_1': np.array([-swipe, 0, swing,
                            swipe, 0, -swing,
                            -swipe, 0, -swing,
                            swipe, 0, swing]),
        'right_0': np.array([swipe, extension, swing,
                             -swipe, extension, -swing,
                             swipe, -extension, -swing,
                             -swipe, -extension, swing]),
        'right_1': np.array([-swipe, 0, -swing,
                             swipe, 0, swing,
                             -swipe, 0, swing,
                             swipe, 0, -swing])
    }
    clockwise = self._solve_direction()
    if clockwise:
        # turn right
        first_leg = pose['right_0']
        second_leg = pose['right_1']
    else:
        # turn left
        first_leg = pose['left_0']
        second_leg = pose['left_1']
    if ith_leg:
        signal = initial_pose + second_leg
    else:
        signal = initial_pose + first_leg
    return signal
def _solve_direction(self):
diff = abs(self._init_orient - self._target_orient)
clockwise = False
if self._init_orient < self._target_orient:
if diff > 3.14:
clockwise = True
else:
if diff < 3.14:
clockwise = True
return clockwise
def _check_target_position(self, t):
    """Check whether the target yaw has been reached and start the stop timer.

    Args:
        t: Seconds since reset, recorded as end_time on first detection.
    """
    current_z = self.pybullet_client.getEulerFromQuaternion(self.rex.GetBaseOrientation())[2]
    # Normalize yaw into [0, ~2*pi) so it is comparable with the target.
    if current_z < 0:
        current_z += 6.28
    # 0.01 rad tolerance around the target orientation.
    if abs(self._target_orient - current_z) <= 0.01:
        self.goal_reached = True
        if not self.is_terminating:
            self.end_time = t
            self.is_terminating = True
def _terminate_with_delay(self, current_t):
if current_t - self.end_time >= 1.:
self.env_goal_reached = True
def _transform_action_to_motor_command(self, action):
    """Convert the policy action into motor commands via the turning gait signal.

    Once the robot has been told to stay still, hold the initial pose and
    run the delayed-termination countdown instead of generating a gait.
    """
    if self._stay_still:
        self._terminate_with_delay(self.rex.GetTimeSinceReset())
        return self.init_pose
    t = self.rex.GetTimeSinceReset()
    self._check_target_position(t)
    action = self._signal(t, action)
    action = super(RexTurnEnv, self)._transform_action_to_motor_command(action)
    return action
def is_fallen(self):
    """Decide whether the rex has fallen.

    The robot counts as fallen when the dot product between its base up
    axis and the world up axis drops below 0.85.

    Returns:
        True if the rex has fallen.
    """
    quat = self.rex.GetBaseOrientation()
    rot_mat = self._pybullet_client.getMatrixFromQuaternion(quat)
    base_up = np.asarray(rot_mat[6:])
    world_up = np.asarray([0, 0, 1])
    return np.dot(world_up, base_up) < 0.85
def _reward(self):
current_base_position = self.rex.GetBasePosition()
# tolerance: 0.035
position_penality = 0.035 - abs(current_base_position[0]) - abs(current_base_position[1])
reward = position_penality
return reward
def _load_cube(self, angle):
    """(Re)load the companion cube marker for the given heading angle.

    Args:
        angle: Heading (rad); the cube is placed on the opposite side
            (angle + ~pi) at unit distance from the origin, at height 1.
    """
    if len(self._companion_obj) > 0:
        # Drop the previously spawned cube before loading a new one.
        self.pybullet_client.removeBody(self._companion_obj['cube'])
    urdf_root = pybullet_data.getDataPath()
    # NOTE(review): this method mixes self._pybullet_client and
    # self.pybullet_client — presumably aliases of the same client; verify.
    self._cube = self._pybullet_client.loadURDF(f"{urdf_root}/cube_small.urdf")
    self._companion_obj['cube'] = self._cube
    orientation = [0, 0, 0, 1]
    x, y = math.cos(angle + 3.14), math.sin(angle + 3.14)
    position = [x, y, 1]
    self.pybullet_client.resetBasePositionAndOrientation(self._cube, position, orientation)
def _get_true_observation(self):
"""Get the true observations of this environment.
It includes the roll, the error between current pitch and desired pitch,
roll dot and pitch dot of the base.
Returns:
The observation list.
"""
observation = []
roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetTrueBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._true_observation = np.array(observation)
return self._true_observation
def _get_observation(self):
observation = []
roll, pitch, _ = self.rex.GetBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._observation = np.array(observation)
return self._observation
def _get_observation_upper_bound(self):
"""Get the upper bound of the observation.
Returns:
The upper bound of an observation. See GetObservation() for the details
of each element of an observation.
"""
upper_bound = np.zeros(self._get_observation_dimension())
upper_bound[0:2] = 2 * math.pi # Roll, pitch, yaw of the base.
upper_bound[2:4] = 2 * math.pi / self._time_step # Roll, pitch, yaw rate.
return upper_bound
def _get_observation_lower_bound(self):
lower_bound = -self._get_observation_upper_bound()
return lower_bound | /rex_gym-0.2.7.tar.gz/rex_gym-0.2.7/rex_gym/envs/gym/turn_env.py | 0.713132 | 0.4575 | turn_env.py | pypi |
import math
import random
from gym import spaces
import numpy as np
from .. import rex_gym_env
from ...model import rex_constants
from ...model.rex import Rex
NUM_LEGS = 4
NUM_MOTORS = 3 * NUM_LEGS
class RexStandupEnv(rex_gym_env.RexGymEnv):
"""The gym environment for the rex.
It simulates the locomotion of a rex, a quadruped robot. The state space
include the angles, velocities and torques for all the motors and the action
space is the desired motor angle for each motor. The reward function is based
on how far the rex walks in 1000 steps and penalizes the energy
expenditure.
"""
metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 66}
def __init__(self,
             debug=False,
             urdf_version=None,
             control_time_step=0.005,
             action_repeat=5,
             control_latency=0,
             pd_latency=0,
             on_rack=False,
             motor_kp=1.0,
             motor_kd=0.02,
             remove_default_joint_damping=False,
             render=False,
             num_steps_to_log=1000,
             env_randomizer=None,
             log_path=None,
             signal_type="ol",
             terrain_type="plane",
             terrain_id=None,
             mark='base'):
    """Initialize the rex standup gym environment.

    Args:
      urdf_version: [DEFAULT_URDF_VERSION, DERPY_V0_URDF_VERSION] are allowable
        versions. If None, DEFAULT_URDF_VERSION is used. Refer to
        rex_gym_env for more details.
      control_time_step: The time step between two successive control signals.
      action_repeat: The number of simulation steps that an action is repeated.
      control_latency: The latency between get_observation() and the actual
        observation. See rex.py for more details.
      pd_latency: The latency used to get motor angles/velocities used to
        compute PD controllers. See rex.py for more details.
      on_rack: Whether to place the rex on rack. This is only used to debug
        the gait. In this mode, the rex's base is hung midair so
        that its gait is clearer to visualize.
      motor_kp: The P gain of the motor.
      motor_kd: The D gain of the motor.
      remove_default_joint_damping: Whether to remove the default joint damping.
      render: Whether to render the simulation.
      num_steps_to_log: The max number of control steps in one episode. If the
        number of steps is over num_steps_to_log, the environment will still
        be running, but only first num_steps_to_log will be recorded in logging.
      env_randomizer: An instance (or a list) of EnvRandomizer(s) that can
        randomize the environment during when env.reset() is called and add
        perturbations when env.step() is called.
      log_path: The path to write out logs. For the details of logging, refer to
        rex_logging.proto.
    """
    super(RexStandupEnv,
          self).__init__(urdf_version=urdf_version,
                         accurate_motor_model_enabled=True,
                         motor_overheat_protection=True,
                         hard_reset=False,
                         motor_kp=motor_kp,
                         motor_kd=motor_kd,
                         remove_default_joint_damping=remove_default_joint_damping,
                         control_latency=control_latency,
                         pd_latency=pd_latency,
                         on_rack=on_rack,
                         render=render,
                         num_steps_to_log=num_steps_to_log,
                         env_randomizer=env_randomizer,
                         log_path=log_path,
                         control_time_step=control_time_step,
                         action_repeat=action_repeat,
                         signal_type=signal_type,
                         debug=debug,
                         terrain_id=terrain_id,
                         terrain_type=terrain_type,
                         mark=mark)
    # Single-dimensional action in [-0.1, 0.1]: modulates the standup brake.
    action_dim = 1
    action_high = np.array([0.1] * action_dim)
    self.action_space = spaces.Box(-action_high, action_high)
    # Rendering camera defaults.
    self._cam_dist = 1.0
    self._cam_yaw = 30
    self._cam_pitch = -30
    if self._on_rack:
        self._cam_pitch = 0
def reset(self):
    """Reset to the folded 'rest_position' pose and return the first observation."""
    super(RexStandupEnv, self).reset(initial_motor_angles=rex_constants.INIT_POSES['rest_position'],
                                     reset_duration=0.5)
    return self._get_observation()
def _signal(self, t, action):
    """Open-loop standup signal: a braked push toward the 'stand' pose.

    After 0.1 s the target is simply the stand pose; before that the stand
    pose is scaled by a decaying 'brake' factor modulated by action[0].
    """
    stand_pose = rex_constants.INIT_POSES['stand']
    if t > 0.1:
        return stand_pose
    # Shift time by one so the divisor stays >= 1.
    brake_scale = (0.1 + action[0]) / (t + 1) + 1.5
    return stand_pose * brake_scale
@staticmethod
def _convert_from_leg_model(leg_pose):
    """Copy a 12-entry leg pose into the motor ordering (currently a 1:1 map)."""
    motor_pose = np.zeros(NUM_MOTORS)
    for leg in range(NUM_LEGS):
        base = 3 * leg
        motor_pose[base:base + 3] = leg_pose[base:base + 3]
    return motor_pose
def _transform_action_to_motor_command(self, action):
    """Turn the policy action into motor commands via the standup signal."""
    action = self._signal(self.rex.GetTimeSinceReset(), action)
    action = self._convert_from_leg_model(action)
    action = super(RexStandupEnv, self)._transform_action_to_motor_command(action)
    return action
def _termination(self):
    """Terminate the episode as soon as the robot falls."""
    return self.is_fallen()
def is_fallen(self):
    """Decide whether the rex has fallen.

    The rex counts as fallen when the base roll exceeds 0.3 rad or the base
    pitch exceeds 0.5 rad in magnitude.

    Returns:
        True if the rex has fallen.
    """
    roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
    rolled_over = math.fabs(roll) > 0.3
    pitched_over = math.fabs(pitch) > 0.5
    return rolled_over or pitched_over
def _reward(self):
# target position
t_pos = [0.0, 0.0, 0.21]
current_base_position = self.rex.GetBasePosition()
position_reward = abs(t_pos[0] - current_base_position[0]) + \
abs(t_pos[1] - current_base_position[1]) + \
abs(t_pos[2] - current_base_position[2])
if abs(position_reward) < 0.1:
position_reward = 1.0 - position_reward
else:
position_reward = -position_reward
if current_base_position[2] > t_pos[2]:
position_reward = -1.0 - position_reward
reward = position_reward
return reward
def _get_true_observation(self):
"""Get the true observations of this environment.
It includes the roll, the error between current pitch and desired pitch,
roll dot and pitch dot of the base.
Returns:
The observation list.
"""
observation = []
roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetTrueBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._true_observation = np.array(observation)
return self._true_observation
def _get_observation(self):
observation = []
roll, pitch, _ = self.rex.GetBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._observation = np.array(observation)
return self._observation
def _get_observation_upper_bound(self):
"""Get the upper bound of the observation.
Returns:
The upper bound of an observation. See GetObservation() for the details
of each element of an observation.
"""
upper_bound = np.zeros(self._get_observation_dimension())
upper_bound[0:2] = 2 * math.pi # Roll, pitch, yaw of the base.
upper_bound[2:4] = 2 * math.pi / self._time_step # Roll, pitch, yaw rate.
return upper_bound
def _get_observation_lower_bound(self):
lower_bound = -self._get_observation_upper_bound()
return lower_bound | /rex_gym-0.2.7.tar.gz/rex_gym-0.2.7/rex_gym/envs/gym/standup_env.py | 0.816516 | 0.45944 | standup_env.py | pypi |
import math
import random
from gym import spaces
import numpy as np
from .. import rex_gym_env
from ...model import rex_constants
from ...model.gait_planner import GaitPlanner
from ...model.kinematics import Kinematics
NUM_LEGS = 4
class RexWalkEnv(rex_gym_env.RexGymEnv):
"""The gym environment for the rex.
It simulates the locomotion of a rex, a quadruped robot. The state space
include the angles, velocities and torques for all the motors and the action
space is the desired motor angle for each motor. The reward function is based
on how far the rex walks in 2000 steps and penalizes the energy
expenditure or how near rex is to the target position.
"""
metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 66}
load_ui = True
is_terminating = False
def __init__(self,
             debug=False,
             urdf_version=None,
             control_time_step=0.005,
             action_repeat=5,
             control_latency=0,
             pd_latency=0,
             on_rack=False,
             motor_kp=1.0,
             motor_kd=0.02,
             render=False,
             num_steps_to_log=2000,
             env_randomizer=None,
             log_path=None,
             target_position=None,
             backwards=None,
             signal_type="ik",
             terrain_type="plane",
             terrain_id=None,
             mark='base'):
    """Initialize the rex walking gym environment.

    Args:
      urdf_version: [DEFAULT_URDF_VERSION, DERPY_V0_URDF_VERSION] are allowable
        versions. If None, DEFAULT_URDF_VERSION is used. Refer to
        rex_gym_env for more details.
      control_time_step: The time step between two successive control signals.
      action_repeat: The number of simulation steps that an action is repeated.
      control_latency: The latency between get_observation() and the actual
        observation. See rex.py for more details.
      pd_latency: The latency used to get motor angles/velocities used to
        compute PD controllers. See rex.py for more details.
      on_rack: Whether to place the rex on rack. This is only used to debug
        the walk gait. In this mode, the rex's base is hung midair so
        that its walk gait is clearer to visualize.
      motor_kp: The P gain of the motor.
      motor_kd: The D gain of the motor.
      render: Whether to render the simulation.
      num_steps_to_log: The max number of control steps in one episode. If the
        number of steps is over num_steps_to_log, the environment will still
        be running, but only first num_steps_to_log will be recorded in logging.
      env_randomizer: An instance (or a list) of EnvRandomizer(s) that can
        randomize the environment during when env.reset() is called and add
        perturbations when env.step() is called.
      log_path: The path to write out logs. For the details of logging, refer to
        rex_logging.proto.
      target_position: Walk target x position; randomized when None.
      backwards: Walk backwards when True; randomized when None.
      signal_type: 'ik' (inverse kinematics gait) or 'ol' (open loop gait).
    """
    super(RexWalkEnv,
          self).__init__(urdf_version=urdf_version,
                         accurate_motor_model_enabled=True,
                         motor_overheat_protection=True,
                         hard_reset=False,
                         motor_kp=motor_kp,
                         motor_kd=motor_kd,
                         remove_default_joint_damping=False,
                         control_latency=control_latency,
                         pd_latency=pd_latency,
                         on_rack=on_rack,
                         render=render,
                         num_steps_to_log=num_steps_to_log,
                         env_randomizer=env_randomizer,
                         log_path=log_path,
                         control_time_step=control_time_step,
                         action_repeat=action_repeat,
                         target_position=target_position,
                         signal_type=signal_type,
                         backwards=backwards,
                         debug=debug,
                         terrain_id=terrain_id,
                         terrain_type=terrain_type,
                         mark=mark)
    # (eventually) allow different feedback ranges/action spaces for different signals
    action_max = {
        'ik': 0.4,
        'ol': 0.01
    }
    action_dim_map = {
        'ik': 2,
        'ol': 8
    }
    # NOTE(review): self._signal_type is read here before being re-assigned
    # below — presumably the super().__init__ call already set it; verify.
    action_dim = action_dim_map[self._signal_type]
    action_high = np.array([action_max[self._signal_type]] * action_dim)
    self.action_space = spaces.Box(-action_high, action_high)
    # Rendering camera defaults.
    self._cam_dist = 1.0
    self._cam_yaw = 0.0
    self._cam_pitch = -20
    self._signal_type = signal_type
    self._gait_planner = GaitPlanner("walk")
    self._kinematics = Kinematics()
    # Episode-progress flags, re-initialized on every reset().
    self.goal_reached = False
    self._stay_still = False
    self.is_terminating = False
def reset(self):
    """Reset the walk episode: pose, direction, target position and debug UI.

    Returns:
        The first observation.
    """
    self.init_pose = rex_constants.INIT_POSES["stand"]
    if self._signal_type == 'ol':
        self.init_pose = rex_constants.INIT_POSES["stand_ol"]
    super(RexWalkEnv, self).reset(initial_motor_angles=self.init_pose, reset_duration=0.5)
    self.goal_reached = False
    self.is_terminating = False
    self._stay_still = False
    # Walking direction: randomized unless fixed via the constructor.
    if self._backwards is None:
        self.backwards = random.choice([True, False])
    else:
        self.backwards = self._backwards
    step = 0.6
    period = 0.65
    base_x = self._base_x
    if self.backwards:
        step = -.3
        period = .5
        base_x = .0
    # Pick a (possibly random) walk target; remember that it was random so a
    # fresh one is drawn on the next reset.
    if not self._target_position or self._random_pos_target:
        bound = -3 if self.backwards else 3
        self._target_position = random.uniform(bound//2, bound)
        self._random_pos_target = True
    if self._is_render and self._signal_type == 'ik':
        # Build the debug sliders only once per process.
        if self.load_ui:
            self.setup_ui(base_x, step, period)
            self.load_ui = False
    if self._is_debug:
        print(f"Target Position x={self._target_position}, Random assignment: {self._random_pos_target}, Backwards: {self.backwards}")
    return self._get_observation()
def setup_ui(self, base_x, step, period):
    """Create the debug UI sliders for base pose and gait parameters.

    Args:
        base_x: Initial base x slider value.
        step: Initial step-length slider value.
        period: Initial step-period slider value.
    """
    self.base_x_ui = self._pybullet_client.addUserDebugParameter("base_x",
                                                                 self._ranges["base_x"][0],
                                                                 self._ranges["base_x"][1],
                                                                 base_x)
    self.base_y_ui = self._pybullet_client.addUserDebugParameter("base_y",
                                                                 self._ranges["base_y"][0],
                                                                 self._ranges["base_y"][1],
                                                                 self._ranges["base_y"][2])
    self.base_z_ui = self._pybullet_client.addUserDebugParameter("base_z",
                                                                 self._ranges["base_z"][0],
                                                                 self._ranges["base_z"][1],
                                                                 self._ranges["base_z"][2])
    self.roll_ui = self._pybullet_client.addUserDebugParameter("roll",
                                                               self._ranges["roll"][0],
                                                               self._ranges["roll"][1],
                                                               self._ranges["roll"][2])
    self.pitch_ui = self._pybullet_client.addUserDebugParameter("pitch",
                                                                self._ranges["pitch"][0],
                                                                self._ranges["pitch"][1],
                                                                self._ranges["pitch"][2])
    self.yaw_ui = self._pybullet_client.addUserDebugParameter("yaw",
                                                              self._ranges["yaw"][0],
                                                              self._ranges["yaw"][1],
                                                              self._ranges["yaw"][2])
    # Gait sliders use fixed hand-tuned ranges rather than self._ranges.
    self.step_length_ui = self._pybullet_client.addUserDebugParameter("step_length", -0.7, 0.7, step)
    self.step_rotation_ui = self._pybullet_client.addUserDebugParameter("step_rotation", -1.5, 1.5, 0.)
    self.step_angle_ui = self._pybullet_client.addUserDebugParameter("step_angle", -180., 180., 0.)
    self.step_period_ui = self._pybullet_client.addUserDebugParameter("step_period", 0.2, 0.9, period)
def _read_inputs(self, base_pos_coeff, gait_stage_coeff):
    """Read the current gait parameters from the debug UI sliders.

    Args:
        base_pos_coeff: Ramp coefficient applied to the base pose values.
        gait_stage_coeff: Ramp coefficient applied to the step length.

    Returns:
        Tuple (position, orientation, step_length, step_rotation, step_angle,
        step_period).
    """
    position = np.array(
        [
            self._pybullet_client.readUserDebugParameter(self.base_x_ui),
            self._pybullet_client.readUserDebugParameter(self.base_y_ui) * base_pos_coeff,
            self._pybullet_client.readUserDebugParameter(self.base_z_ui) * base_pos_coeff
        ]
    )
    orientation = np.array(
        [
            self._pybullet_client.readUserDebugParameter(self.roll_ui) * base_pos_coeff,
            self._pybullet_client.readUserDebugParameter(self.pitch_ui) * base_pos_coeff,
            self._pybullet_client.readUserDebugParameter(self.yaw_ui) * base_pos_coeff
        ]
    )
    step_length = self._pybullet_client.readUserDebugParameter(self.step_length_ui) * gait_stage_coeff
    step_rotation = self._pybullet_client.readUserDebugParameter(self.step_rotation_ui)
    step_angle = self._pybullet_client.readUserDebugParameter(self.step_angle_ui)
    step_period = self._pybullet_client.readUserDebugParameter(self.step_period_ui)
    return position, orientation, step_length, step_rotation, step_angle, step_period
def _check_target_position(self, t):
if self._target_position:
current_x = abs(self.rex.GetBasePosition()[0])
# give 0.15 stop space
if current_x >= abs(self._target_position) - 0.15:
self.goal_reached = True
if not self.is_terminating:
self.end_time = t
self.is_terminating = True
@staticmethod
def _evaluate_base_stage_coeff(current_t, end_t=0.0, width=0.001):
# sigmoid function
beta = p = width
if p - beta + end_t <= current_t <= p - (beta / 2) + end_t:
return (2 / beta ** 2) * (current_t - p + beta) ** 2
elif p - (beta/2) + end_t <= current_t <= p + end_t:
return 1 - (2 / beta ** 2) * (current_t - p) ** 2
else:
return 1
@staticmethod
def _evaluate_gait_stage_coeff(current_t, action, end_t=0.0):
# ramp function
p = 0.8 + action[0]
if end_t <= current_t <= p + end_t:
return current_t
else:
return 1.0
@staticmethod
def _evaluate_brakes_stage_coeff(current_t, action, end_t=0.0, end_value=0.0):
# ramp function
p = 0.8 + action[1]
if end_t <= current_t <= p + end_t:
return 1 - (current_t - end_t)
else:
return end_value
def _signal(self, t, action):
if self._signal_type == 'ik':
return self._IK_signal(t, action)
if self._signal_type == 'ol':
return self._open_loop_signal(t, action)
def _IK_signal(self, t, action):
    """Compute the 12 leg joint angles for the walking gait via inverse kinematics.

    Args:
        t: Seconds since reset; drives the ramp-in and brake coefficients.
        action: action[0] tunes the gait ramp window, action[1] the brake window.

    Returns:
        List of 12 joint angles ordered FL, FR, RL, RR (shoulder, leg, foot each).
    """
    base_pos_coeff = self._evaluate_base_stage_coeff(t, width=1.5)
    gait_stage_coeff = self._evaluate_gait_stage_coeff(t, action)
    # Defaults; walking backwards shortens the step and the period.
    step = 0.6
    period = 0.65
    base_x = self._base_x
    if self.backwards:
        step = -.3
        period = .5
        base_x = .0
    if self._is_render and self._is_debug:
        # Interactive mode: gait parameters come from the debug UI sliders.
        position, orientation, step_length, step_rotation, step_angle, step_period = \
            self._read_inputs(base_pos_coeff, gait_stage_coeff)
    else:
        position = np.array([base_x,
                             self._base_y * base_pos_coeff,
                             self._base_z * base_pos_coeff])
        orientation = np.array([self._base_roll * base_pos_coeff,
                                self._base_pitch * base_pos_coeff,
                                self._base_yaw * base_pos_coeff])
        # CLI-provided overrides win; otherwise use the defaults above.
        step_length = (self.step_length if self.step_length is not None else step) * gait_stage_coeff
        step_rotation = (self.step_rotation if self.step_rotation is not None else 0.0)
        step_angle = self.step_angle if self.step_angle is not None else 0.0
        step_period = (self.step_period if self.step_period is not None else period)
    if self.goal_reached:
        # Scale the step length down to zero to brake smoothly, then freeze.
        brakes_coeff = self._evaluate_brakes_stage_coeff(t, action, self.end_time)
        step_length *= brakes_coeff
        if brakes_coeff == 0.0:
            self._stay_still = True
    direction = -1.0 if step_length < 0 else 1.0
    frames = self._gait_planner.loop(step_length, step_angle, step_rotation, step_period, direction)
    fr_angles, fl_angles, rr_angles, rl_angles, _ = self._kinematics.solve(orientation, position, frames)
    signal = [
        fl_angles[0], fl_angles[1], fl_angles[2],
        fr_angles[0], fr_angles[1], fr_angles[2],
        rl_angles[0], rl_angles[1], rl_angles[2],
        rr_angles[0], rr_angles[1], rr_angles[2]
    ]
    return signal
def _open_loop_signal(self, t, action):
period = 1.0 / 8
l_a = 0.1
f_a = l_a * 2
if self.goal_reached:
coeff = self._evaluate_brakes_stage_coeff(t, [0., 0.], end_t=self.end_time, end_value=0.0)
l_a *= coeff
f_a *= coeff
if coeff is 0.0:
self._stay_still = True
start_coeff = self._evaluate_gait_stage_coeff(t, [0.0])
l_a *= start_coeff
f_a *= start_coeff
l_extension = l_a * math.cos(2 * math.pi / period * t)
f_extension = f_a * math.cos(2 * math.pi / period * t)
initial_pose = self.init_pose
l_swing = -l_extension
swing = -f_extension
pose = np.array([0.0, l_extension + action[0], f_extension + action[1],
0.0, l_swing + action[2], swing + action[3],
0.0, l_swing + action[4], swing + action[5],
0.0, l_extension + action[6], f_extension + action[7]])
signal = initial_pose + pose
return signal
def _transform_action_to_motor_command(self, action):
    """Convert the policy action into motor commands via the walking gait signal.

    Once the robot has been told to stay still, hold the initial pose.
    """
    if self._stay_still:
        return self.init_pose
    t = self.rex.GetTimeSinceReset()
    self._check_target_position(t)
    action = self._signal(t, action)
    action = super(RexWalkEnv, self)._transform_action_to_motor_command(action)
    return action
def is_fallen(self):
    """Decide whether the rex has fallen.

    The robot counts as fallen when the dot product between the base up
    axis and the world up axis drops below 0.85.

    Returns:
        True if the rex has fallen.
    """
    rot_mat = self._pybullet_client.getMatrixFromQuaternion(self.rex.GetBaseOrientation())
    base_up_axis = np.asarray(rot_mat[6:])
    return np.dot(np.asarray([0, 0, 1]), base_up_axis) < 0.85
def _get_true_observation(self):
"""Get the true observations of this environment.
It includes the roll, the error between current pitch and desired pitch,
roll dot and pitch dot of the base.
Returns:
The observation list.
"""
observation = []
roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetTrueBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._true_observation = np.array(observation)
return self._true_observation
def _get_observation(self):
observation = []
roll, pitch, _ = self.rex.GetBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._observation = np.array(observation)
return self._observation
def _get_observation_upper_bound(self):
"""Get the upper bound of the observation.
Returns:
The upper bound of an observation. See GetObservation() for the details
of each element of an observation.
"""
upper_bound = np.zeros(self._get_observation_dimension())
upper_bound[0:2] = 2 * math.pi # Roll, pitch, yaw of the base.
upper_bound[2:4] = 2 * math.pi / self._time_step # Roll, pitch, yaw rate.
return upper_bound
def _get_observation_lower_bound(self):
lower_bound = -self._get_observation_upper_bound()
return lower_bound | /rex_gym-0.2.7.tar.gz/rex_gym-0.2.7/rex_gym/envs/gym/walk_env.py | 0.705176 | 0.491212 | walk_env.py | pypi |
import collections
import math
import random
from gym import spaces
import numpy as np
from .. import rex_gym_env
from rex_gym.model.kinematics import Kinematics
from ...model.rex import Rex
STEP_PERIOD = 1.0 / 15.0 # 15 steps per second.
STEP_AMPLITUDE = 0.25
NUM_LEGS = 4
NUM_MOTORS = 3 * NUM_LEGS
class RexPosesEnv(rex_gym_env.RexGymEnv):
"""The gym environment for the rex.
It simulates the locomotion of a rex, a quadruped robot. The state space
include the angles, velocities and torques for all the motors and the action
space is the desired motor angle for each motor. The reward function is based
on how far the rex walks in 1000 steps and penalizes the energy
expenditure.
"""
metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 66}
load_ui = True
manual_control = False
def __init__(self,
             debug=False,
             urdf_version=None,
             control_time_step=0.006,
             action_repeat=6,
             control_latency=0,
             pd_latency=0,
             on_rack=False,
             motor_kp=1.0,
             motor_kd=0.02,
             remove_default_joint_damping=False,
             render=False,
             num_steps_to_log=1000,
             env_randomizer=None,
             log_path=None,
             base_y=None,
             base_z=None,
             base_roll=None,
             base_pitch=None,
             base_yaw=None,
             signal_type='ik',
             terrain_type="plane",
             terrain_id=None,
             mark='base'):
    """Initialize the rex poses gym environment.

    Args:
      urdf_version: [DEFAULT_URDF_VERSION, DERPY_V0_URDF_VERSION] are allowable
        versions. If None, DEFAULT_URDF_VERSION is used. Refer to
        rex_gym_env for more details.
      control_time_step: The time step between two successive control signals.
      action_repeat: The number of simulation steps that an action is repeated.
      control_latency: The latency between get_observation() and the actual
        observation. See rex.py for more details.
      pd_latency: The latency used to get motor angles/velocities used to
        compute PD controllers. See rex.py for more details.
      on_rack: Whether to place the rex on rack. This is only used to debug
        the gait. In this mode, the rex's base is hung midair so
        that its gait is clearer to visualize.
      motor_kp: The P gain of the motor.
      motor_kd: The D gain of the motor.
      remove_default_joint_damping: Whether to remove the default joint damping.
      render: Whether to render the simulation.
      num_steps_to_log: The max number of control steps in one episode. If the
        number of steps is over num_steps_to_log, the environment will still
        be running, but only first num_steps_to_log will be recorded in logging.
      env_randomizer: An instance (or a list) of EnvRandomizer(s) that can
        randomize the environment during when env.reset() is called and add
        perturbations when env.step() is called.
      log_path: The path to write out logs. For the details of logging, refer to
        rex_logging.proto.
      base_y/base_z/base_roll/base_pitch/base_yaw: Optional fixed target for
        the corresponding base-pose dimension; randomized when all are None.
    """
    super(RexPosesEnv,
          self).__init__(urdf_version=urdf_version,
                         accurate_motor_model_enabled=True,
                         motor_overheat_protection=True,
                         hard_reset=False,
                         motor_kp=motor_kp,
                         motor_kd=motor_kd,
                         remove_default_joint_damping=remove_default_joint_damping,
                         control_latency=control_latency,
                         pd_latency=pd_latency,
                         on_rack=on_rack,
                         render=render,
                         num_steps_to_log=num_steps_to_log,
                         env_randomizer=env_randomizer,
                         log_path=log_path,
                         control_time_step=control_time_step,
                         action_repeat=action_repeat,
                         base_y=base_y,
                         base_z=base_z,
                         base_roll=base_roll,
                         base_pitch=base_pitch,
                         base_yaw=base_yaw,
                         debug=debug,
                         signal_type=signal_type,
                         terrain_id=terrain_id,
                         terrain_type=terrain_type,
                         mark=mark)
    self.mark = mark
    # Single-dimensional action in [-0.1, 0.1]: tunes the pose ramp window.
    action_dim = 1
    action_high = np.array([0.1] * action_dim)
    self.action_space = spaces.Box(-action_high, action_high)
    # Rendering camera defaults.
    self._cam_dist = 1.0
    self._cam_yaw = 30
    self._cam_pitch = -30
    self.stand = False
    if self._on_rack:
        self._cam_pitch = 0
    self._init_base_x = self._ranges["base_x"][2]
def setup_ui_params(self):
    """Create debug UI sliders for the base position and orientation.

    Slider bounds and initial values come from self._ranges
    (min, max, default) per dimension.
    """
    self.base_x = self._pybullet_client.addUserDebugParameter("base_x",
                                                              self._ranges["base_x"][0],
                                                              self._ranges["base_x"][1],
                                                              self._ranges["base_x"][2])
    self.base_y = self._pybullet_client.addUserDebugParameter("base_y",
                                                              self._ranges["base_y"][0],
                                                              self._ranges["base_y"][1],
                                                              self._ranges["base_y"][2])
    self.base_z = self._pybullet_client.addUserDebugParameter("base_z",
                                                              self._ranges["base_z"][0],
                                                              self._ranges["base_z"][1],
                                                              self._ranges["base_z"][2])
    self.roll = self._pybullet_client.addUserDebugParameter("roll",
                                                            self._ranges["roll"][0],
                                                            self._ranges["roll"][1],
                                                            self._ranges["roll"][2])
    self.pitch = self._pybullet_client.addUserDebugParameter("pitch",
                                                             self._ranges["pitch"][0],
                                                             self._ranges["pitch"][1],
                                                             self._ranges["pitch"][2])
    self.yaw = self._pybullet_client.addUserDebugParameter("yaw",
                                                           self._ranges["yaw"][0],
                                                           self._ranges["yaw"][1],
                                                           self._ranges["yaw"][2])
def reset(self):
    """Reset the env and pick the next pose dimension to animate.

    In rendered mode the debug UI sliders take over (manual control);
    otherwise the next pose comes from CLI-provided values or is cycled
    from the pose queue with a random target.

    Returns:
        The first observation.
    """
    super(RexPosesEnv, self).reset()
    if self._is_render:
        # Build the debug sliders only once per process.
        if self.load_ui:
            self.setup_ui_params()
            self.load_ui = False
        self.manual_control = True
    else:
        if self._base_y is not None or self._base_z is not None or self._base_roll is not None \
                or self._base_pitch is not None or self._base_yaw is not None:
            self.fill_next_pose_and_target()
        else:
            self.next_pose = self._queue.popleft()
            # requeue element
            self._queue.append(self.next_pose)
            self.target_value = random.uniform(self._ranges[self.next_pose][0], self._ranges[self.next_pose][1])
        # Working copy of the (min, max, current) ranges mutated by _signal.
        self.values = self._ranges.copy()
    return self._get_observation()
def fill_next_pose_and_target(self):
    """Select the next pose dimension and its target from the CLI-provided values.

    Picks the first dimension with a non-zero value, in order
    base_y, base_z, roll, pitch, falling back to yaw.
    """
    # BUG FIX: the original compared e.g. `self._base_y != 0.0`, which is
    # also True for None, so an unset (None) dimension could be selected and
    # its None target would crash downstream (None * coeff). Truthiness
    # covers both None and an explicit 0.0, preserving the skip semantics.
    if self._base_y:
        self.next_pose = "base_y"
        self.target_value = self._base_y
    elif self._base_z:
        self.next_pose = "base_z"
        self.target_value = self._base_z
    elif self._base_roll:
        self.next_pose = "roll"
        self.target_value = self._base_roll
    elif self._base_pitch:
        self.next_pose = "pitch"
        self.target_value = self._base_pitch
    else:
        self.next_pose = "yaw"
        self.target_value = self._base_yaw
@staticmethod
def _evaluate_stage_coefficient(current_t, action, end_t=0.0):
# ramp function
p = 0.8 + action[0]
if end_t <= current_t <= p + end_t:
return current_t
else:
return 1.0
def _signal(self, t, action):
    """Compute the 12 joint angles realizing the current target base pose.

    Args:
        t: Seconds since reset.
        action: action[0] tunes the ramp window in automatic mode.

    Returns:
        List of 12 joint angles ordered FL, FR, RL, RR (shoulder, leg, foot each).
    """
    if not self.manual_control:
        # Ramp the selected pose dimension toward its target value.
        stage_coeff = self._evaluate_stage_coefficient(t, action)
        staged_value = self.target_value * stage_coeff
        # values[key] is (min, max, current); update only the current slot.
        self.values[self.next_pose] = (self.values[self.next_pose][0],
                                       self.values[self.next_pose][1],
                                       staged_value)
        self.position = np.array([
            self.values["base_x"][2],
            self.values["base_y"][2],
            self.values["base_z"][2]
        ])
        self.orientation = np.array([
            self.values["roll"][2],
            self.values["pitch"][2],
            self.values["yaw"][2]
        ])
    else:
        # Rendered mode: pose comes straight from the debug UI sliders.
        self.position, self.orientation = self._read_inputs()
    kinematics = Kinematics()
    fr_angles, fl_angles, rr_angles, rl_angles, _ = kinematics.solve(self.orientation, self.position)
    signal = [
        fl_angles[0], fl_angles[1], fl_angles[2],
        fr_angles[0], fr_angles[1], fr_angles[2],
        rl_angles[0], rl_angles[1], rl_angles[2],
        rr_angles[0], rr_angles[1], rr_angles[2]
    ]
    return signal
def _read_inputs(self):
    """Read the base position and orientation from the debug UI sliders.

    Returns:
        Tuple (position, orientation) as numpy arrays [x, y, z] and
        [roll, pitch, yaw].
    """
    position = np.array(
        [
            self._pybullet_client.readUserDebugParameter(self.base_x),
            self._pybullet_client.readUserDebugParameter(self.base_y),
            self._pybullet_client.readUserDebugParameter(self.base_z)
        ]
    )
    orientation = np.array(
        [
            self._pybullet_client.readUserDebugParameter(self.roll),
            self._pybullet_client.readUserDebugParameter(self.pitch),
            self._pybullet_client.readUserDebugParameter(self.yaw)
        ]
    )
    return position, orientation
@staticmethod
def _convert_from_leg_model(leg_pose):
    """Copy the 12-entry leg pose into a fresh motor-pose array (1:1 mapping)."""
    motor_pose = np.zeros(NUM_MOTORS)
    for joint in range(3 * NUM_LEGS):
        motor_pose[joint] = leg_pose[joint]
    return motor_pose
def _transform_action_to_motor_command(self, action):
    """Turn the policy action into motor commands via the pose signal."""
    action = self._signal(self.rex.GetTimeSinceReset(), action)
    action = self._convert_from_leg_model(action)
    action = super(RexPosesEnv, self)._transform_action_to_motor_command(action)
    return action
def is_fallen(self):
    """Decide whether the rex has fallen.

    The poses environment only tracks static base poses, so falling is not a
    failure mode here.

    Returns:
        Always False.
    """
    # Fix: dropped a dead read of GetTrueBaseRollPitchYaw whose result was
    # never used.
    return False
def _reward(self):
    """Constant positive reward for every step the rex keeps standing."""
    # positive reward as long as rex stands
    return 1.0
def _get_true_observation(self):
"""Get the true observations of this environment.
It includes the roll, the error between current pitch and desired pitch,
roll dot and pitch dot of the base.
Returns:
The observation list.
"""
observation = []
roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetTrueBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._true_observation = np.array(observation)
return self._true_observation
def _get_observation(self):
observation = []
roll, pitch, _ = self.rex.GetBaseRollPitchYaw()
roll_rate, pitch_rate, _ = self.rex.GetBaseRollPitchYawRate()
observation.extend([roll, pitch, roll_rate, pitch_rate])
self._observation = np.array(observation)
return self._observation
def _get_observation_upper_bound(self):
"""Get the upper bound of the observation.
Returns:
The upper bound of an observation. See GetObservation() for the details
of each element of an observation.
"""
upper_bound = np.zeros(self._get_observation_dimension())
upper_bound[0:2] = 2 * math.pi # Roll, pitch, yaw of the base.
upper_bound[2:4] = 2 * math.pi / self._time_step # Roll, pitch, yaw rate.
return upper_bound
def _get_observation_lower_bound(self):
lower_bound = -self._get_observation_upper_bound()
return lower_bound | /rex_gym-0.2.7.tar.gz/rex_gym-0.2.7/rex_gym/envs/gym/poses_env.py | 0.695958 | 0.343617 | poses_env.py | pypi |
import collections
import math
import random
from gym import spaces
import numpy as np
from rex_gym.model import mark_constants
from rex_gym.model.gait_planner import GaitPlanner
from .. import rex_gym_env
# Radians
from ...model import rex_constants
from ...model.kinematics import Kinematics
NUM_LEGS = 4
NUM_MOTORS = 3 * NUM_LEGS
# One (shoulder, leg, foot) joint triple per leg, flattened leg-by-leg.
RexPose = collections.namedtuple(
    "RexPose",
    ["shoulder_angle_1", "leg_angle_1", "foot_angle_1",
     "shoulder_angle_2", "leg_angle_2", "foot_angle_2",
     "shoulder_angle_3", "leg_angle_3", "foot_angle_3",
     "shoulder_angle_4", "leg_angle_4", "foot_angle_4"])
class RexReactiveEnv(rex_gym_env.RexGymEnv):
    """The gym environment for Rex.

    It simulates the locomotion of Rex, a quadruped robot. The state space
    include the angles, velocities and torques for all the motors and the action
    space is the desired motor angle for each motor. The reward function is based
    on how far Rex walks in 1000 steps and penalizes the energy
    expenditure.
    """
    metadata = {"render.modes": ["human", "rgb_array"], "video.frames_per_second": 166}
    load_ui = True
    is_terminating = False

    def __init__(self,
                 debug=False,
                 urdf_version=None,
                 energy_weight=0.005,
                 control_time_step=0.006,
                 action_repeat=6,
                 control_latency=0.0,
                 pd_latency=0.0,
                 on_rack=False,
                 motor_kp=1.0,
                 motor_kd=0.02,
                 render=False,
                 num_steps_to_log=2000,
                 use_angle_in_observation=True,
                 env_randomizer=None,
                 log_path=None,
                 target_position=None,
                 signal_type="ik",
                 terrain_type="plane",
                 terrain_id=None,
                 mark='base'):
        """Initialize Rex trotting gym environment.

        Args:
          urdf_version: [DEFAULT_URDF_VERSION] are allowable
            versions. If None, DEFAULT_URDF_VERSION is used. Refer to
            rex_gym_env for more details.
          energy_weight: The weight of the energy term in the reward function. Refer
            to rex_gym_env for more details.
          control_time_step: The time step between two successive control signals.
          action_repeat: The number of simulation steps that an action is repeated.
          control_latency: The latency between get_observation() and the actual
            observation. See rex.py for more details.
          pd_latency: The latency used to get motor angles/velocities used to
            compute PD controllers. See rex.py for more details.
          on_rack: Whether to place Rex on rack. This is only used to debug
            the walk gait. In this mode, Rex's base is hung midair so
            that its walk gait is clearer to visualize.
          motor_kp: The P gain of the motor.
          motor_kd: The D gain of the motor.
          num_steps_to_log: The max number of control steps in one episode. If the
            number of steps is over num_steps_to_log, the environment will still
            be running, but only first num_steps_to_log will be recorded in logging.
          use_angle_in_observation: Whether to include motor angles in observation.
          env_randomizer: An instance (or a list) of EnvRanzomier(s) that can
            randomize the environment during when env.reset() is called and add
            perturbations when env.step() is called.
          log_path: The path to write out logs. For the details of logging, refer to
            rex_logging.proto.
        """
        self._use_angle_in_observation = use_angle_in_observation
        super(RexReactiveEnv,
              self).__init__(urdf_version=urdf_version,
                             energy_weight=energy_weight,
                             accurate_motor_model_enabled=True,
                             motor_overheat_protection=True,
                             hard_reset=False,
                             motor_kp=motor_kp,
                             motor_kd=motor_kd,
                             remove_default_joint_damping=False,
                             control_latency=control_latency,
                             pd_latency=pd_latency,
                             on_rack=on_rack,
                             render=render,
                             num_steps_to_log=num_steps_to_log,
                             env_randomizer=env_randomizer,
                             log_path=log_path,
                             control_time_step=control_time_step,
                             action_repeat=action_repeat,
                             target_position=target_position,
                             signal_type=signal_type,
                             debug=debug,
                             terrain_id=terrain_id,
                             terrain_type=terrain_type,
                             mark=mark)
        # (eventually) allow different feedback ranges/action spaces for different signals
        action_max = {
            'ik': 0.4,
            'ol': 0.3
        }
        action_dim_map = {
            'ik': 2,
            'ol': 4,
        }
        action_dim = action_dim_map[self._signal_type]
        # The action space is symmetric around zero. gym's Box requires
        # low <= high, so the positive bound is `high` and its negation `low`
        # (the previous code had them swapped).
        action_high = np.array([action_max[self._signal_type]] * action_dim)
        action_low = -action_high
        self.action_space = spaces.Box(action_low, action_high)
        self._cam_dist = 1.0
        self._cam_yaw = 0.0
        self._cam_pitch = -20
        self._target_position = target_position
        self._signal_type = signal_type
        self._gait_planner = GaitPlanner("gallop")
        self._kinematics = Kinematics()
        self.goal_reached = False
        self._stay_still = False
        self.is_terminating = False

    def reset(self):
        """Reset the environment; randomizes the target x-position when unset."""
        self.init_pose = rex_constants.INIT_POSES["stand"]
        if self._signal_type == 'ol':
            self.init_pose = rex_constants.INIT_POSES["stand_ol"]
        super(RexReactiveEnv, self).reset(initial_motor_angles=self.init_pose, reset_duration=0.5)
        self.goal_reached = False
        self._stay_still = False
        self.is_terminating = False
        if not self._target_position or self._random_pos_target:
            self._target_position = random.uniform(1, 3)
            self._random_pos_target = True
        if self._is_render and self._signal_type == 'ik':
            # The debug sliders only need to be created once per process.
            if self.load_ui:
                self.setup_ui()
                self.load_ui = False
        if self._is_debug:
            print(f"Target Position x={self._target_position}, Random assignment: {self._random_pos_target}")
        return self._get_observation()

    def setup_ui(self):
        """Create the pybullet debug sliders for base pose and gait parameters."""
        self.base_x_ui = self._pybullet_client.addUserDebugParameter("base_x",
                                                                    self._ranges["base_x"][0],
                                                                    self._ranges["base_x"][1],
                                                                    0.01)
        self.base_y_ui = self._pybullet_client.addUserDebugParameter("base_y",
                                                                    self._ranges["base_y"][0],
                                                                    self._ranges["base_y"][1],
                                                                    self._ranges["base_y"][2])
        self.base_z_ui = self._pybullet_client.addUserDebugParameter("base_z",
                                                                    self._ranges["base_z"][0],
                                                                    self._ranges["base_z"][1],
                                                                    -0.007)
        self.roll_ui = self._pybullet_client.addUserDebugParameter("roll",
                                                                   self._ranges["roll"][0],
                                                                   self._ranges["roll"][1],
                                                                   self._ranges["roll"][2])
        self.pitch_ui = self._pybullet_client.addUserDebugParameter("pitch",
                                                                    self._ranges["pitch"][0],
                                                                    self._ranges["pitch"][1],
                                                                    self._ranges["pitch"][2])
        self.yaw_ui = self._pybullet_client.addUserDebugParameter("yaw",
                                                                  self._ranges["yaw"][0],
                                                                  self._ranges["yaw"][1],
                                                                  self._ranges["yaw"][2])
        self.step_length_ui = self._pybullet_client.addUserDebugParameter("step_length", -0.7, 1.5, 1.3)
        self.step_rotation_ui = self._pybullet_client.addUserDebugParameter("step_rotation", -1.5, 1.5, 0.)
        self.step_angle_ui = self._pybullet_client.addUserDebugParameter("step_angle", -180., 180., 0.)
        self.step_period_ui = self._pybullet_client.addUserDebugParameter("step_period", 0.1, .9, 0.3)

    def _read_inputs(self, base_pos_coeff, gait_stage_coeff):
        """Read base pose and gait sliders, scaled by the given ramp coefficients."""
        position = np.array(
            [
                self._pybullet_client.readUserDebugParameter(self.base_x_ui),
                self._pybullet_client.readUserDebugParameter(self.base_y_ui) * base_pos_coeff,
                self._pybullet_client.readUserDebugParameter(self.base_z_ui)
            ]
        )
        orientation = np.array(
            [
                self._pybullet_client.readUserDebugParameter(self.roll_ui) * base_pos_coeff,
                self._pybullet_client.readUserDebugParameter(self.pitch_ui) * base_pos_coeff,
                self._pybullet_client.readUserDebugParameter(self.yaw_ui) * base_pos_coeff
            ]
        )
        step_length = self._pybullet_client.readUserDebugParameter(self.step_length_ui) * gait_stage_coeff
        step_rotation = self._pybullet_client.readUserDebugParameter(self.step_rotation_ui)
        step_angle = self._pybullet_client.readUserDebugParameter(self.step_angle_ui)
        step_period = self._pybullet_client.readUserDebugParameter(self.step_period_ui)
        return position, orientation, step_length, step_rotation, step_angle, step_period

    def _check_target_position(self, t):
        """Flag goal_reached (and record the time) once |x| passes the target."""
        if self._target_position:
            current_x = abs(self.rex.GetBasePosition()[0])
            if current_x >= abs(self._target_position):
                self.goal_reached = True
                if not self.is_terminating:
                    self.end_time = t
                    self.is_terminating = True

    @staticmethod
    def _evaluate_stage_coefficient(current_t, end_t=0.0, width=0.001):
        # Piecewise-quadratic smooth ramp from 0 to 1 over `width` seconds.
        beta = p = width
        if p - beta + end_t <= current_t <= p - (beta / 2) + end_t:
            return (2 / beta ** 2) * (current_t - p + beta) ** 2
        elif p - (beta/2) + end_t <= current_t <= p + end_t:
            return 1 - (2 / beta ** 2) * (current_t - p) ** 2
        else:
            return 1

    @staticmethod
    def _evaluate_brakes_stage_coeff(current_t, action, end_t=0.0, end_value=0.0):
        # Linear ramp down from 1 toward `end_value` once braking starts.
        p = 1. + action[0]
        if end_t <= current_t <= p + end_t:
            return 1 - (current_t - end_t)
        else:
            return end_value

    @staticmethod
    def _evaluate_gait_stage_coeff(current_t, action, end_t=0.0):
        # Linear ramp up at the start of the gait, then a constant 1.
        p = 1. + action[1]
        if end_t <= current_t <= p + end_t:
            return current_t
        else:
            return 1.0

    def _signal(self, t, action):
        """Dispatch to the IK or open-loop signal generator for time t."""
        if self._signal_type == 'ik':
            return self._IK_signal(t, action)
        if self._signal_type == 'ol':
            return self._open_loop_signal(t, action)

    def _IK_signal(self, t, action):
        """Build the 12-joint IK leg signal for time t (UI-driven when rendering)."""
        base_pos_coeff = self._evaluate_stage_coefficient(t, width=1.5)
        gait_stage_coeff = self._evaluate_gait_stage_coeff(t, action)
        if self._is_render:
            position, orientation, step_length, step_rotation, step_angle, step_period = \
                self._read_inputs(base_pos_coeff, gait_stage_coeff)
        else:
            position = np.array([0.01,
                                 self._base_y * base_pos_coeff,
                                 -0.007])
            orientation = np.array([self._base_roll * base_pos_coeff,
                                    self._base_pitch * base_pos_coeff,
                                    self._base_yaw * base_pos_coeff])
            step_length = (self.step_length if self.step_length is not None else 1.3) * gait_stage_coeff
            step_rotation = (self.step_rotation if self.step_rotation is not None else 0.0)
            step_angle = self.step_angle if self.step_angle is not None else 0.0
            step_period = (self.step_period if self.step_period is not None else 0.3)
        if self.goal_reached:
            # Shrink the stride to brake once the target position is reached.
            brakes_coeff = self._evaluate_brakes_stage_coeff(t, action, self.end_time)
            step_length *= brakes_coeff
        frames = self._gait_planner.loop(step_length, step_angle, step_rotation, step_period, 1.0)
        fr_angles, fl_angles, rr_angles, rl_angles, _ = self._kinematics.solve(orientation, position, frames)
        signal = [
            fl_angles[0], fl_angles[1], fl_angles[2],
            fr_angles[0], fr_angles[1], fr_angles[2],
            rl_angles[0], rl_angles[1], rl_angles[2],
            rr_angles[0], rr_angles[1], rr_angles[2]
        ]
        return signal

    def _open_loop_signal(self, t, leg_pose):
        """Build the 12 motor angles from the init pose plus the 4-dim action."""
        if self.goal_reached:
            coeff = self._evaluate_brakes_stage_coeff(t, [.0], end_t=self.end_time, end_value=0.0)
            leg_pose *= coeff
            # Value comparison (not `is` identity on a float literal), so the
            # brake actually latches the stay-still state when it bottoms out.
            if coeff == 0.0:
                self._stay_still = True
        motor_pose = np.zeros(NUM_MOTORS)
        for i in range(NUM_LEGS):
            motor_pose[int(3 * i)] = self.init_pose[3 * i]
            init_leg = self.init_pose[3 * i + 1]
            init_foot = self.init_pose[3 * i + 2]
            # Front legs (0, 1) share action components 0/1; rear legs 2/3.
            if i == 0 or i == 1:
                motor_pose[int(3 * i + 1)] = init_leg + leg_pose[0]
                motor_pose[int(3 * i + 2)] = init_foot + leg_pose[1]
            else:
                motor_pose[int(3 * i + 1)] = init_leg + leg_pose[2]
                motor_pose[int(3 * i + 2)] = init_foot + leg_pose[3]
        return motor_pose

    def _transform_action_to_motor_command(self, action):
        """Turn a policy action into motor commands (or hold pose when stopped)."""
        if self._stay_still:
            return self.rex.initial_pose
        t = self.rex.GetTimeSinceReset()
        self._check_target_position(t)
        action = self._signal(t, action)
        action = super(RexReactiveEnv, self)._transform_action_to_motor_command(action)
        return action

    def _out_of_trajectory(self):
        """True when the base has drifted more than 0.3 m sideways (y axis)."""
        current_base_position = self.rex.GetBasePosition()
        return current_base_position[1] > 0.3

    def is_fallen(self):
        """Decides whether Rex is in a fallen state.

        If the roll of the base is greater than 0.3 radians or the pitch
        greater than 0.5 radians, the rex is considered fallen.

        Returns:
            Boolean value that indicates whether Rex has fallen.
        """
        roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
        return math.fabs(roll) > 0.3 or math.fabs(pitch) > 0.5

    def _get_true_observation(self):
        """Get the true observations of this environment.

        It includes the roll, the pitch, the roll dot and the pitch dot of the
        base. If _use_angle_in_observation is true, motor angles are added into
        the observation.

        Returns:
            The observation list, which is a numpy array of floating-point values.
        """
        roll, pitch, _ = self.rex.GetTrueBaseRollPitchYaw()
        roll_rate, pitch_rate, _ = self.rex.GetTrueBaseRollPitchYawRate()
        observation = [roll, pitch, roll_rate, pitch_rate]
        if self._use_angle_in_observation:
            observation.extend(self.rex.GetMotorAngles().tolist())
        self._true_observation = np.array(observation)
        return self._true_observation

    def _get_observation(self):
        """Noisy/delayed counterpart of _get_true_observation()."""
        roll, pitch, _ = self.rex.GetBaseRollPitchYaw()
        roll_rate, pitch_rate, _ = self.rex.GetBaseRollPitchYawRate()
        observation = [roll, pitch, roll_rate, pitch_rate]
        if self._use_angle_in_observation:
            observation.extend(self.rex.GetMotorAngles().tolist())
        self._observation = np.array(observation)
        return self._observation

    def _get_observation_upper_bound(self):
        """Get the upper bound of the observation.

        Returns:
            The upper bound of an observation. See _get_true_observation() for
            the details of each element of an observation.
        """
        upper_bound_roll = 2 * math.pi
        upper_bound_pitch = 2 * math.pi
        upper_bound_roll_dot = 2 * math.pi / self._time_step
        upper_bound_pitch_dot = 2 * math.pi / self._time_step
        upper_bound_motor_angle = 2 * math.pi
        upper_bound = [
            upper_bound_roll, upper_bound_pitch, upper_bound_roll_dot, upper_bound_pitch_dot
        ]
        if self._use_angle_in_observation:
            upper_bound.extend([upper_bound_motor_angle] * mark_constants.MARK_DETAILS['motors_num'][self.mark])
        return np.array(upper_bound)

    def _get_observation_lower_bound(self):
        """Lower bound of the observation: the negated upper bound."""
        lower_bound = -self._get_observation_upper_bound()
        return lower_bound
import collections
import copy
import math
import re
import numpy as np
from . import motor, terrain, mark_constants, rex_constants
from ..util import pybullet_data
INIT_RACK_POSITION = [0, 0, 1]
INIT_ORIENTATION = [0, 0, 0, 1]
# Motor protection thresholds used by ApplyAction's overheat logic.
OVERHEAT_SHUTDOWN_TORQUE = 2.45
OVERHEAT_SHUTDOWN_TIME = 1.0
LEG_POSITION = ["front_left", "front_right", "rear_left", "rear_right"]
# Joint-name classifiers used by _BuildUrdfIds (first match wins).
_CHASSIS_NAME_PATTERN = re.compile(r"chassis\D*")
_MOTOR_NAME_PATTERN = re.compile(r"motor\D*")
_FOOT_NAME_PATTERN = re.compile(r"foot_motor\D*")
_ARM_NAME_PATTERN = re.compile(r"arm\D*")
# Per-sensor Gaussian noise std devs: angle, velocity, torque, rpy, rpy rate.
SENSOR_NOISE_STDDEV = (0.0, 0.0, 0.0, 0.0, 0.0)
TWO_PI = 2 * math.pi


def MapToMinusPiToPi(angles):
    """Maps a list of angles to [-pi, pi].

    Args:
        angles: A list (or array) of angles in rad.

    Returns:
        A container of the same type with each angle mapped to [-pi, pi].
    """
    wrapped_angles = copy.deepcopy(angles)
    for i, angle in enumerate(angles):
        wrapped = math.fmod(angle, TWO_PI)
        if wrapped >= math.pi:
            wrapped -= TWO_PI
        elif wrapped < -math.pi:
            wrapped += TWO_PI
        wrapped_angles[i] = wrapped
    return wrapped_angles
class Rex:
    """The Rex class that simulates a quadruped robot."""

    def __init__(self,
                 pybullet_client,
                 urdf_root=pybullet_data.getDataPath(),
                 time_step=0.01,
                 action_repeat=1,
                 self_collision_enabled=False,
                 motor_velocity_limit=np.inf,
                 pd_control_enabled=False,
                 accurate_motor_model_enabled=False,
                 remove_default_joint_damping=False,
                 motor_kp=1.0,
                 motor_kd=0.02,
                 pd_latency=0.0,
                 control_latency=0.0,
                 observation_noise_stdev=SENSOR_NOISE_STDDEV,
                 torque_control_enabled=False,
                 motor_overheat_protection=False,
                 on_rack=False,
                 pose_id='stand',
                 terrain_id='plane',
                 mark='base'):
        """Constructs a Rex and reset it to the initial states.

        Args:
          pybullet_client: The instance of BulletClient to manage different
            simulations.
          urdf_root: The path to the urdf folder.
          time_step: The time step of the simulation.
          action_repeat: The number of ApplyAction() for each control step.
          self_collision_enabled: Whether to enable self collision.
          motor_velocity_limit: The upper limit of the motor velocity.
          pd_control_enabled: Whether to use PD control for the motors.
          accurate_motor_model_enabled: Whether to use the accurate DC motor model.
          remove_default_joint_damping: Whether to remove the default joint damping.
          motor_kp: proportional gain for the accurate motor model.
          motor_kd: derivative gain for the accurate motor model.
          pd_latency: The latency of the observations (in seconds) used to calculate
            PD control. On the real hardware, it is the latency between the
            microcontroller and the motor controller.
          control_latency: The latency of the observations (in second) used to
            calculate action. On the real hardware, it is the latency from the motor
            controller, the microcontroller to the host (Nvidia TX2).
          observation_noise_stdev: The standard deviation of a Gaussian noise model
            for the sensor. It should be an array for separate sensors in the
            following order [motor_angle, motor_velocity, motor_torque,
            base_roll_pitch_yaw, base_angular_velocity]
          torque_control_enabled: Whether to use the torque control, if set to
            False, pose control will be used.
          motor_overheat_protection: Whether to shutdown the motor that has exerted
            large torque (OVERHEAT_SHUTDOWN_TORQUE) for an extended amount of time
            (OVERHEAT_SHUTDOWN_TIME). See ApplyAction() in rex.py for more
            details.
          on_rack: Whether to place the Rex on rack. This is only used to debug
            the walk gait. In this mode, the Rex's base is hanged midair so
            that its walk gait is clearer to visualize.
        """
        self.mark = mark
        # Motor count and names depend on the robot variant ("mark").
        self.num_motors = mark_constants.MARK_DETAILS['motors_num'][self.mark]
        self.num_legs = 4
        self.motors_name = mark_constants.MARK_DETAILS['motors_names'][self.mark]
        self._pybullet_client = pybullet_client
        self._action_repeat = action_repeat
        self._urdf_root = urdf_root
        self._self_collision_enabled = self_collision_enabled
        self._motor_velocity_limit = motor_velocity_limit
        self._pd_control_enabled = pd_control_enabled
        self._motor_direction = [1 for _ in range(self.num_motors)]
        self._observed_motor_torques = np.zeros(self.num_motors)
        self._applied_motor_torques = np.zeros(self.num_motors)
        # Force cap handed to pybullet position control
        # (see _SetDesiredMotorAngleById).
        self._max_force = 3.5
        self._pd_latency = pd_latency
        self._control_latency = control_latency
        self._observation_noise_stdev = observation_noise_stdev
        self._accurate_motor_model_enabled = accurate_motor_model_enabled
        self._remove_default_joint_damping = remove_default_joint_damping
        # Ring buffer of past observations used to emulate sensor latency.
        self._observation_history = collections.deque(maxlen=100)
        self._control_observation = []
        self._chassis_link_ids = [-1]
        self._leg_link_ids = []
        self._motor_link_ids = []
        self._foot_link_ids = []
        self._torque_control_enabled = torque_control_enabled
        self._motor_overheat_protection = motor_overheat_protection
        self._on_rack = on_rack
        self.pose_id = pose_id
        # @TODO fix MotorModel
        if self._accurate_motor_model_enabled:
            # Accurate DC motor model: gains are forwarded to the model itself.
            self._kp = motor_kp
            self._kd = motor_kd
            self._motor_model = motor.MotorModel(motors_num=self.num_motors,
                                                 torque_control_enabled=self._torque_control_enabled,
                                                 kp=self._kp,
                                                 kd=self._kd)
        elif self._pd_control_enabled:
            # Hand-tuned PD gains when the accurate model is disabled.
            self._kp = 8
            self._kd = 0.3
        else:
            # Plain pybullet position control.
            self._kp = 1
            self._kd = 1
        self.time_step = time_step
        self._step_counter = 0
        self.init_on_rack_position = INIT_RACK_POSITION
        self.init_position = terrain.ROBOT_INIT_POSITION[terrain_id]
        self.initial_pose = rex_constants.INIT_POSES[pose_id]
        # reset_time=-1.0 means skipping the reset motion.
        # See Reset for more details.
        self.Reset(reset_time=-1)
def GetTimeSinceReset(self):
return self._step_counter * self.time_step
def Step(self, action):
for _ in range(self._action_repeat):
self.ApplyAction(action)
self._pybullet_client.stepSimulation()
self.ReceiveObservation()
self._step_counter += 1
    def Terminate(self):
        """Shut the robot down. No-op in simulation; kept for hardware API parity."""
        pass
def _RecordMassInfoFromURDF(self):
self._base_mass_urdf = []
for chassis_id in self._chassis_link_ids:
self._base_mass_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, chassis_id)[0])
self._leg_masses_urdf = []
for leg_id in self._leg_link_ids:
self._leg_masses_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, leg_id)[0])
for motor_id in self._motor_link_ids:
self._leg_masses_urdf.append(
self._pybullet_client.getDynamicsInfo(self.quadruped, motor_id)[0])
def _RecordInertiaInfoFromURDF(self):
"""Record the inertia of each body from URDF file."""
self._link_urdf = []
num_bodies = self._pybullet_client.getNumJoints(self.quadruped)
for body_id in range(-1, num_bodies): # -1 is for the base link.
inertia = self._pybullet_client.getDynamicsInfo(self.quadruped, body_id)[2]
self._link_urdf.append(inertia)
# We need to use id+1 to index self._link_urdf because it has the base
# (index = -1) at the first element.
self._base_inertia_urdf = [
self._link_urdf[chassis_id + 1] for chassis_id in self._chassis_link_ids
]
self._leg_inertia_urdf = [self._link_urdf[leg_id + 1] for leg_id in self._leg_link_ids]
self._leg_inertia_urdf.extend(
[self._link_urdf[motor_id + 1] for motor_id in self._motor_link_ids])
def _BuildJointNameToIdDict(self):
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
self._joint_name_to_id = {}
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
self._joint_name_to_id[joint_info[1].decode("UTF-8")] = joint_info[0]
def _BuildUrdfIds(self):
"""Build the link Ids from its name in the URDF file."""
num_joints = self._pybullet_client.getNumJoints(self.quadruped)
self._chassis_link_ids = [-1]
# the self._leg_link_ids include both the upper and lower links of the leg.
self._leg_link_ids = []
self._motor_link_ids = []
self._foot_link_ids = []
self._arm_link_ids = []
for i in range(num_joints):
joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
joint_name = joint_info[1].decode("UTF-8")
joint_id = self._joint_name_to_id[joint_name]
if _CHASSIS_NAME_PATTERN.match(joint_name):
self._chassis_link_ids.append(joint_id)
elif _MOTOR_NAME_PATTERN.match(joint_name):
self._motor_link_ids.append(joint_id)
elif _FOOT_NAME_PATTERN.match(joint_name):
self._foot_link_ids.append(joint_id)
elif _ARM_NAME_PATTERN.match(joint_name):
self._arm_link_ids.append(joint_id)
else:
self._leg_link_ids.append(joint_id)
self._leg_link_ids.extend(self._foot_link_ids)
self._chassis_link_ids.sort()
self._motor_link_ids.sort()
self._foot_link_ids.sort()
self._leg_link_ids.sort()
self._arm_link_ids.sort()
    def _RemoveDefaultJointDamping(self):
        """Zero out pybullet's default linear/angular joint damping.

        NOTE(review): changeDynamics receives joint_info[0] (the joint index)
        in the bodyUniqueId slot with link index -1, mirroring the upstream
        minitaur code — confirm this targets the intended bodies before
        changing it.
        """
        num_joints = self._pybullet_client.getNumJoints(self.quadruped)
        for i in range(num_joints):
            joint_info = self._pybullet_client.getJointInfo(self.quadruped, i)
            self._pybullet_client.changeDynamics(joint_info[0], -1, linearDamping=0, angularDamping=0)
def _BuildMotorIdList(self):
self._motor_id_list = [self._joint_name_to_id[motor_name] for motor_name in self.motors_name]
@staticmethod
def IsObservationValid():
"""Whether the observation is valid for the current time step.
In simulation, observations are always valid. In real hardware, it may not
be valid from time to time when communication error happens.
Returns:
Whether the observation is valid for the current time step.
"""
return True
    def Reset(self, reload_urdf=True, default_motor_angles=None, reset_time=3.0):
        """Reset the Rex to its initial states.

        Args:
          reload_urdf: Whether to reload the urdf file. If not, Reset() just place
            the Rex back to its starting position.
          default_motor_angles: The default motor angles. If it is None, Rex
            will hold a default pose for 100 steps. In
            torque control mode, the phase of holding the default pose is skipped.
          reset_time: The duration (in seconds) to hold the default motor angles. If
            reset_time <= 0 or in torque control mode, the phase of holding the
            default pose is skipped.
        """
        print("reset simulation")
        # On-rack mode hangs the base midair for gait debugging.
        if self._on_rack:
            init_position = INIT_RACK_POSITION
        else:
            init_position = self.init_position
        if reload_urdf:
            if self._self_collision_enabled:
                self.quadruped = self._pybullet_client.loadURDF(
                    pybullet_data.getDataPath() + f"/assets/urdf/{mark_constants.MARK_DETAILS['urdf_name'][self.mark]}",
                    init_position,
                    INIT_ORIENTATION,
                    useFixedBase=self._on_rack,
                    flags=self._pybullet_client.URDF_USE_SELF_COLLISION)
            else:
                self.quadruped = self._pybullet_client.loadURDF(
                    pybullet_data.getDataPath() + f"/assets/urdf/{mark_constants.MARK_DETAILS['urdf_name'][self.mark]}",
                    init_position,
                    INIT_ORIENTATION,
                    useFixedBase=self._on_rack)
            # Rebuild all cached URDF-derived bookkeeping after a fresh load.
            self._BuildJointNameToIdDict()
            self._BuildUrdfIds()
            if self._remove_default_joint_damping:
                self._RemoveDefaultJointDamping()
            self._BuildMotorIdList()
            self._RecordMassInfoFromURDF()
            self._RecordInertiaInfoFromURDF()
            self.ResetPose()
        else:
            # Fast path: reposition the already-loaded body and zero its velocity.
            self._pybullet_client.resetBasePositionAndOrientation(self.quadruped, init_position,
                                                                  INIT_ORIENTATION)
            self._pybullet_client.resetBaseVelocity(self.quadruped, [0, 0, 0], [0, 0, 0])
            self.ResetPose()
        self._overheat_counter = np.zeros(self.num_motors)
        self._motor_enabled_list = [True] * self.num_motors
        self._step_counter = 0
        # Perform reset motion within reset_duration if in position control mode.
        # Nothing is performed if in torque control mode for now.
        self._observation_history.clear()
        if reset_time > 0.0 and default_motor_angles is not None:
            pose = self.initial_pose
            if len(default_motor_angles) != mark_constants.MARK_DETAILS['motors_num'][self.mark]:
                # extend with arm rest pose
                default_motor_angles = np.concatenate((default_motor_angles, rex_constants.ARM_POSES["rest"]))
                pose = np.concatenate((pose, rex_constants.ARM_POSES["rest"]))
            self.ReceiveObservation()
            # Hold the initial pose first, then blend to the requested angles.
            for _ in range(100):
                self.ApplyAction(pose)
                self._pybullet_client.stepSimulation()
                self.ReceiveObservation()
            num_steps_to_reset = int(reset_time / self.time_step)
            for _ in range(num_steps_to_reset):
                self.ApplyAction(default_motor_angles)
                self._pybullet_client.stepSimulation()
                self.ReceiveObservation()
        self.ReceiveObservation()
def _SetMotorTorqueById(self, motor_id, torque):
self._pybullet_client.setJointMotorControl2(bodyIndex=self.quadruped,
jointIndex=motor_id,
controlMode=self._pybullet_client.TORQUE_CONTROL,
force=torque)
def _SetDesiredMotorAngleById(self, motor_id, desired_angle):
self._pybullet_client.setJointMotorControl2(bodyIndex=self.quadruped,
jointIndex=motor_id,
controlMode=self._pybullet_client.POSITION_CONTROL,
targetPosition=desired_angle,
positionGain=self._kp,
velocityGain=self._kd,
force=self._max_force)
def SetDesiredMotorAngleByName(self, motor_name, desired_angle):
self._SetDesiredMotorAngleById(self._joint_name_to_id[motor_name], desired_angle)
def ResetPose(self):
"""Reset the pose of the Rex."""
for i in range(self.num_legs):
self._ResetPoseForLeg(i)
if self.num_motors > 12:
# set the remaining motors
self._ResetArmMotors()
def _ResetPoseForLeg(self, leg_id):
"""Reset the initial pose for the leg.
Args:
leg_id: It should be 0, 1, 2, or 3, which represents the leg at
front_left, back_left, front_right and back_right.
"""
leg_position = LEG_POSITION[leg_id]
self._pybullet_client.resetJointState(self.quadruped,
self._joint_name_to_id[f"motor_{leg_position}_shoulder"],
rex_constants.INIT_POSES[self.pose_id][3 * leg_id],
targetVelocity=0)
self._pybullet_client.resetJointState(self.quadruped,
self._joint_name_to_id[f"motor_{leg_position}_leg"],
rex_constants.INIT_POSES[self.pose_id][3 * leg_id + 1],
targetVelocity=0)
self._pybullet_client.resetJointState(self.quadruped,
self._joint_name_to_id[f"foot_motor_{leg_position}"],
rex_constants.INIT_POSES[self.pose_id][3 * leg_id + 2],
targetVelocity=0)
if self._accurate_motor_model_enabled or self._pd_control_enabled:
# Disable the default motor in pybullet.
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id[f"motor_{leg_position}_shoulder"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=0)
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id[f"motor_{leg_position}_leg"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=0)
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id[f"foot_motor_{leg_position}"]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=0)
def _ResetArmMotors(self):
for i in range(len(mark_constants.ARM_MOTOR_NAMES)):
self._pybullet_client.resetJointState(self.quadruped,
self._joint_name_to_id[mark_constants.ARM_MOTOR_NAMES[i]],
rex_constants.ARM_POSES['rest'][i],
targetVelocity=0)
if self._accurate_motor_model_enabled or self._pd_control_enabled:
# Disable the default motor in pybullet.
self._pybullet_client.setJointMotorControl2(
bodyIndex=self.quadruped,
jointIndex=(self._joint_name_to_id[mark_constants.ARM_MOTOR_NAMES[i]]),
controlMode=self._pybullet_client.VELOCITY_CONTROL,
targetVelocity=0,
force=0)
def GetBasePosition(self):
"""Get the position of Rex's base.
Returns:
The position of Rex's base.
"""
position, _ = (self._pybullet_client.getBasePositionAndOrientation(self.quadruped))
return position
def GetTrueBaseRollPitchYaw(self):
"""Get Rex's base orientation in euler angle in the world frame.
Returns:
A tuple (roll, pitch, yaw) of the base in world frame.
"""
orientation = self.GetTrueBaseOrientation()
roll_pitch_yaw = self._pybullet_client.getEulerFromQuaternion(orientation)
return np.asarray(roll_pitch_yaw)
def GetBaseRollPitchYaw(self):
"""Get Rex's base orientation in euler angle in the world frame.
This function mimics the noisy sensor reading and adds latency.
Returns:
A tuple (roll, pitch, yaw) of the base in world frame polluted by noise
and latency.
"""
delayed_orientation = np.array(
self._control_observation[3 * self.num_motors:3 * self.num_motors + 4])
delayed_roll_pitch_yaw = self._pybullet_client.getEulerFromQuaternion(delayed_orientation)
roll_pitch_yaw = self._AddSensorNoise(np.array(delayed_roll_pitch_yaw),
self._observation_noise_stdev[3])
return roll_pitch_yaw
def GetTrueMotorAngles(self):
"""Gets the motor angles at the current moment, mapped to [-pi, pi].
Returns:
Motor angles, mapped to [-pi, pi].
"""
motor_angles = [
self._pybullet_client.getJointState(self.quadruped, motor_id)[0]
for motor_id in self._motor_id_list
]
motor_angles = np.multiply(motor_angles, self._motor_direction)
return motor_angles
def GetMotorAngles(self):
"""Gets the motor angles.
This function mimicks the noisy sensor reading and adds latency. The motor
angles that are delayed, noise polluted, and mapped to [-pi, pi].
Returns:
Motor angles polluted by noise and latency, mapped to [-pi, pi].
"""
motor_angles = self._AddSensorNoise(np.array(self._control_observation[0:self.num_motors]),
self._observation_noise_stdev[0])
return MapToMinusPiToPi(motor_angles)
def GetTrueMotorVelocities(self):
"""Get the velocity of all eight motors.
Returns:
Velocities of all eight motors.
"""
motor_velocities = [
self._pybullet_client.getJointState(self.quadruped, motor_id)[1]
for motor_id in self._motor_id_list
]
motor_velocities = np.multiply(motor_velocities, self._motor_direction)
return motor_velocities
def GetMotorVelocities(self):
"""Get the velocity of all eight motors.
This function mimicks the noisy sensor reading and adds latency.
Returns:
Velocities of all eight motors polluted by noise and latency.
"""
return self._AddSensorNoise(
np.array(self._control_observation[self.num_motors:2 * self.num_motors]),
self._observation_noise_stdev[1])
def GetTrueMotorTorques(self):
"""Get the amount of torque the motors are exerting.
Returns:
Motor torques of all eight motors.
"""
if self._accurate_motor_model_enabled or self._pd_control_enabled:
return self._observed_motor_torques
else:
motor_torques = [
self._pybullet_client.getJointState(self.quadruped, motor_id)[3]
for motor_id in self._motor_id_list
]
motor_torques = np.multiply(motor_torques, self._motor_direction)
return motor_torques
def GetMotorTorques(self):
"""Get the amount of torque the motors are exerting.
This function mimicks the noisy sensor reading and adds latency.
Returns:
Motor torques of all eight motors polluted by noise and latency.
"""
return self._AddSensorNoise(
np.array(self._control_observation[2 * self.num_motors:3 * self.num_motors]),
self._observation_noise_stdev[2])
def GetTrueBaseOrientation(self):
    """Return the true orientation of Rex's base as a quaternion.

    Returns:
        The (x, y, z, w) orientation quaternion of Rex's base.
    """
    pos_orn = self._pybullet_client.getBasePositionAndOrientation(self.quadruped)
    return pos_orn[1]
def GetBaseOrientation(self):
    """Return the base orientation quaternion seen by the noisy sensors.

    Built from the noisy/delayed roll-pitch-yaw reading, so it inherits that
    reading's noise and latency.

    Returns:
        The orientation of Rex's base polluted by noise and latency.
    """
    noisy_rpy = self.GetBaseRollPitchYaw()
    return self._pybullet_client.getQuaternionFromEuler(noisy_rpy)
def GetTrueBaseRollPitchYawRate(self):
    """Return the true angular velocity of Rex's base.

    NOTE(review): this returns the raw pybullet base angular velocity; it is
    not converted to euler-angle rates despite the name — confirm callers
    expect angular velocity.

    Returns:
        Array of the three angular velocity components of the base.
    """
    _, angular_velocity = self._pybullet_client.getBaseVelocity(self.quadruped)
    return np.asarray([angular_velocity[0], angular_velocity[1], angular_velocity[2]])
def GetBaseRollPitchYawRate(self):
    """Return the base orientation-change rate seen by the noisy sensors.

    Mimicks a noisy sensor reading and adds latency via the delayed control
    observation.

    Returns:
        Rate of (roll, pitch, yaw) change of Rex's base polluted by noise
        and latency.
    """
    start = 3 * self.num_motors + 4
    raw = np.array(self._control_observation[start:start + 3])
    return self._AddSensorNoise(raw, self._observation_noise_stdev[4])
def GetActionDimension(self):
    """Return the size of the action vector (one entry per motor)."""
    return self.num_motors
def ApplyAction(self, motor_commands, motor_kps=None, motor_kds=None):
    """Set the desired motor angles to the motors of the Rex.

    The desired motor angles are clipped based on the maximum allowed
    velocity. If pd_control_enabled is True, a torque is calculated from the
    difference between current and desired joint angle, as well as the joint
    velocity, and exerted on the motor. See
    https://en.wikipedia.org/wiki/PID_controller.

    Args:
        motor_commands: The eight desired motor angles.
        motor_kps: Proportional gains for the motor model. If not provided,
            it uses the default kp of the Rex for all the motors.
        motor_kds: Derivative gains for the motor model. If not provided, it
            uses the default kd of the Rex for all the motors.
    """
    # Clamp the commanded angles so the implied joint velocity over one
    # control step never exceeds the velocity limit.
    if self._motor_velocity_limit < np.inf:
        current_motor_angle = self.GetTrueMotorAngles()
        motor_commands_max = (current_motor_angle + self.time_step * self._motor_velocity_limit)
        motor_commands_min = (current_motor_angle - self.time_step * self._motor_velocity_limit)
        motor_commands = np.clip(motor_commands, motor_commands_min, motor_commands_max)
    # Set the kp and kd for all the motors if not provided as an argument.
    if motor_kps is None:
        motor_kps = np.full(self.num_motors, self._kp)
    if motor_kds is None:
        motor_kds = np.full(self.num_motors, self._kd)
    if self._accurate_motor_model_enabled or self._pd_control_enabled:
        # PD observations are taken with pd-latency; the true velocity is
        # still needed by the motor model (back-EMF / damping).
        q, qdot = self._GetPDObservation()
        qdot_true = self.GetTrueMotorVelocities()
        if self._accurate_motor_model_enabled:
            actual_torque, observed_torque = self._motor_model.convert_to_torque(
                motor_commands, q, qdot, qdot_true, motor_kps, motor_kds)
            if self._motor_overheat_protection:
                # Disable a motor permanently (for this episode) once it has
                # exceeded the shutdown torque for too many consecutive steps.
                for i in range(self.num_motors):
                    if abs(actual_torque[i]) > OVERHEAT_SHUTDOWN_TORQUE:
                        self._overheat_counter[i] += 1
                    else:
                        self._overheat_counter[i] = 0
                    if self._overheat_counter[i] > OVERHEAT_SHUTDOWN_TIME / self.time_step:
                        self._motor_enabled_list[i] = False
            # The torque is already in the observation space because we use
            # GetMotorAngles and GetMotorVelocities.
            self._observed_motor_torques = observed_torque
            # Transform into the motor space when applying the torque.
            self._applied_motor_torque = np.multiply(actual_torque, self._motor_direction)
            for motor_id, motor_torque, motor_enabled in zip(self._motor_id_list,
                                                             self._applied_motor_torque,
                                                             self._motor_enabled_list):
                # Overheated (disabled) motors are commanded zero torque.
                if motor_enabled:
                    self._SetMotorTorqueById(motor_id, motor_torque)
                else:
                    self._SetMotorTorqueById(motor_id, 0)
        else:
            # Plain PD control on the (pd-latency delayed) joint state.
            torque_commands = -1 * motor_kps * (q - motor_commands) - motor_kds * qdot
            # The torque is already in the observation space because we use
            # GetMotorAngles and GetMotorVelocities.
            self._observed_motor_torques = torque_commands
            # Transform into the motor space when applying the torque.
            self._applied_motor_torques = np.multiply(self._observed_motor_torques,
                                                      self._motor_direction)
            for motor_id, motor_torque in zip(self._motor_id_list, self._applied_motor_torques):
                self._SetMotorTorqueById(motor_id, motor_torque)
    else:
        # Position control: delegate PD to the simulator's joint motors.
        motor_commands_with_direction = np.multiply(motor_commands, self._motor_direction)
        for motor_id, motor_command_with_direction in zip(self._motor_id_list,
                                                          motor_commands_with_direction):
            self._SetDesiredMotorAngleById(motor_id, motor_command_with_direction)
def GetBaseMassesFromURDF(self):
    """Get the masses of the base links as parsed from the URDF file."""
    return self._base_mass_urdf
def GetBaseInertiasFromURDF(self):
    """Get the inertias of the base links as parsed from the URDF file."""
    return self._base_inertia_urdf
def GetLegMassesFromURDF(self):
    """Get the masses of the leg links as parsed from the URDF file."""
    return self._leg_masses_urdf
def GetLegInertiasFromURDF(self):
    """Get the inertias of the leg links as parsed from the URDF file."""
    return self._leg_inertia_urdf
def SetBaseMasses(self, base_mass):
    """Set the mass of Rex's base.

    Args:
        base_mass: A list of masses, one per body link in CHASIS_LINK_IDS;
            it must have the same length as CHASIS_LINK_IDS.

    Raises:
        ValueError: If len(base_mass) differs from
            len(self._chassis_link_ids).
    """
    n_links = len(self._chassis_link_ids)
    if len(base_mass) != n_links:
        raise ValueError("The length of base_mass {} and self._chassis_link_ids {} are not "
                         "the same.".format(len(base_mass), n_links))
    for chassis_id, chassis_mass in zip(self._chassis_link_ids, base_mass):
        self._pybullet_client.changeDynamics(self.quadruped, chassis_id, mass=chassis_mass)
def SetLegMasses(self, leg_masses):
    """Set the masses of the legs and motors.

    Args:
        leg_masses: The leg masses followed by the motor masses, covering
            every leg link and every motor link.

    Raises:
        ValueError: If the number of masses does not equal the number of
            leg links plus motors.
    """
    n_legs = len(self._leg_link_ids)
    if len(leg_masses) != n_legs + len(self._motor_link_ids):
        raise ValueError("The number of values passed to SetLegMasses are "
                         "different than number of leg links and motors.")
    for leg_id, leg_mass in zip(self._leg_link_ids, leg_masses):
        self._pybullet_client.changeDynamics(self.quadruped, leg_id, mass=leg_mass)
    for link_id, motor_mass in zip(self._motor_link_ids, leg_masses[n_legs:]):
        self._pybullet_client.changeDynamics(self.quadruped, link_id, mass=motor_mass)
def SetBaseInertias(self, base_inertias):
    """Set the inertias of Rex's base links.

    Args:
        base_inertias: A list of (diagonal) inertias, one per body link in
            CHASIS_LINK_IDS; must match the length of CHASIS_LINK_IDS.

    Raises:
        ValueError: If the list length does not match
            self._chassis_link_ids, or if any inertia value is negative.
    """
    n_links = len(self._chassis_link_ids)
    if len(base_inertias) != n_links:
        raise ValueError("The length of base_inertias {} and self._chassis_link_ids {} are "
                         "not the same.".format(len(base_inertias), n_links))
    for chassis_id, chassis_inertia in zip(self._chassis_link_ids, base_inertias):
        # Validate before touching the simulator for this link.
        for inertia_value in chassis_inertia:
            if (np.asarray(inertia_value) < 0).any():
                raise ValueError("Values in inertia matrix should be non-negative.")
        self._pybullet_client.changeDynamics(self.quadruped,
                                             chassis_id,
                                             localInertiaDiagonal=chassis_inertia)
def GetTrueObservation(self):
    """Assemble the full noise-free observation vector.

    Concatenates, in order: motor angles, motor velocities, motor torques,
    base orientation quaternion, and base roll/pitch/yaw rate.
    """
    observation = []
    for part in (self.GetTrueMotorAngles(),
                 self.GetTrueMotorVelocities(),
                 self.GetTrueMotorTorques(),
                 self.GetTrueBaseOrientation(),
                 self.GetTrueBaseRollPitchYawRate()):
        observation.extend(part)
    return observation
def ReceiveObservation(self):
    """Receive the observation from sensors.

    Called once per step; pushes the newest true observation onto the
    history (newest first) and refreshes the latency-delayed control
    observation.
    """
    latest = self.GetTrueObservation()
    self._observation_history.appendleft(latest)
    self._control_observation = self._GetControlObservation()
def _GetDelayedObservation(self, latency):
"""Get observation that is delayed by the amount specified in latency.
Args:
latency: The latency (in seconds) of the delayed observation.
Returns:
observation: The observation which was actually latency seconds ago.
"""
if latency <= 0 or len(self._observation_history) == 1:
observation = self._observation_history[0]
else:
n_steps_ago = int(latency / self.time_step)
if n_steps_ago + 1 >= len(self._observation_history):
return self._observation_history[-1]
remaining_latency = latency - n_steps_ago * self.time_step
blend_alpha = remaining_latency / self.time_step
observation = ((1.0 - blend_alpha) * np.array(self._observation_history[n_steps_ago]) +
blend_alpha * np.array(self._observation_history[n_steps_ago + 1]))
return observation
def _GetPDObservation(self):
    """Return (angles, velocities) delayed by the PD-controller latency."""
    delayed = self._GetDelayedObservation(self._pd_latency)
    n = self.num_motors
    return np.array(delayed[0:n]), np.array(delayed[n:2 * n])
def _GetControlObservation(self):
    """Return the observation delayed by the control-loop latency."""
    return self._GetDelayedObservation(self._control_latency)
def _AddSensorNoise(self, sensor_values, noise_stdev):
if noise_stdev <= 0:
return sensor_values
observation = sensor_values + np.random.normal(scale=noise_stdev, size=sensor_values.shape)
return observation
def SetTimeSteps(self, action_repeat, simulation_step):
    """Set the time steps of the control and simulation.

    Args:
        action_repeat: Number of simulation steps over which the same
            action is repeated.
        simulation_step: The simulation time step in seconds.
    """
    self._action_repeat = action_repeat
    self.time_step = simulation_step
@property
def chassis_link_ids(self):
    """The pybullet link ids that make up the chassis (read-only)."""
    return self._chassis_link_ids
import numpy as np
# TODO: set params to match mg996r servo
VOLTAGE_CLIPPING = 50  # Net motor voltage is clamped to +/- this value (V).
# TODO: Clamp the pwm signal instead of the OBSERVED_TORQUE_LIMIT.
OBSERVED_TORQUE_LIMIT = 5.7  # Sensor-side torque reading saturates here.
MOTOR_VOLTAGE = 32.0  # Supply voltage (V).
MOTOR_RESISTANCE = 0.186  # Winding resistance (Ohm).
MOTOR_TORQUE_CONSTANT = 0.0954  # Kt; also reused as the back-EMF constant.
MOTOR_VISCOUS_DAMPING = 0  # Viscous friction coefficient.
# No-load speed implied by the electrical model above.
MOTOR_SPEED_LIMIT = MOTOR_VOLTAGE / (MOTOR_VISCOUS_DAMPING + MOTOR_TORQUE_CONSTANT)
class MotorModel:
    """The accurate motor model, which is based on the physics of DC motors.

    The motor model supports two types of control: position control and
    torque control. In position control mode, a desired motor angle is
    specified, and a torque is computed based on the internal motor model.
    When torque control is specified, a pwm signal in the range of
    [-1.0, 1.0] is converted to torque.

    The internal motor model takes the following factors into consideration:
    pd gains, viscous friction, back-EMF voltage and current-torque profile.
    """

    def __init__(self, motors_num, torque_control_enabled=False, kp=1.2, kd=0):
        self._motors_num = motors_num
        self._torque_control_enabled = torque_control_enabled
        self._kp = kp
        self._kd = kd
        self._resistance = MOTOR_RESISTANCE
        self._voltage = MOTOR_VOLTAGE
        self._torque_constant = MOTOR_TORQUE_CONSTANT
        self._viscous_damping = MOTOR_VISCOUS_DAMPING
        # Empirical current (A) -> torque saturation profile used by
        # np.interp in _convert_to_torque_from_pwm.
        self._current_table = [0, 10, 20, 30, 40, 50, 60]
        self._torque_table = [0, 1, 1.9, 2.45, 3.0, 3.25, 3.5]
        self._strength_ratios = [1.0] * self._motors_num

    def set_strength_ratios(self, ratios):
        """Set the strength of each motor relative to the default value.

        Args:
            ratios: The relative strength of motor output, one value per
                motor, each in [0.0, 1.0].
        """
        self._strength_ratios = np.array(ratios)

    def set_motor_gains(self, kp, kd):
        """Set the PD gains of all motors for positional control.

        Args:
            kp: proportional gain of the motors.
            kd: derivative gain of the motors.
        """
        self._kp = kp
        self._kd = kd

    def set_voltage(self, voltage):
        """Set the motor supply voltage (V)."""
        self._voltage = voltage

    def get_voltage(self):
        """Return the motor supply voltage (V)."""
        return self._voltage

    def set_viscous_damping(self, viscous_damping):
        """Set the viscous damping (friction) coefficient."""
        self._viscous_damping = viscous_damping

    def get_viscous_damping(self):
        """Return the viscous damping (friction) coefficient."""
        return self._viscous_damping

    def get_viscous_dampling(self):
        """Deprecated misspelled alias of get_viscous_damping().

        Kept for backward compatibility with existing callers.
        """
        return self.get_viscous_damping()

    def convert_to_torque(self,
                          motor_commands,
                          motor_angle,
                          motor_velocity,
                          true_motor_velocity,
                          kp=None,
                          kd=None):
        """Convert the commands (position or torque control) to torque.

        Args:
            motor_commands: The desired motor angle if the motor is in
                position control mode; the pwm signal if the motor is in
                torque control mode.
            motor_angle: The motor angle observed at the current time step.
                It is actually the true motor angle observed a few
                milliseconds ago (pd latency).
            motor_velocity: The motor velocity observed at the current time
                step, actually the true motor velocity a few milliseconds
                ago (pd latency).
            true_motor_velocity: The true motor velocity, used to compute
                back EMF voltage and viscous damping.
            kp: Proportional gains for the motors' PD controllers. If not
                provided, the default kp of Rex is used for all motors.
            kd: Derivative gains for the motors' PD controllers. If not
                provided, the default kd of Rex is used for all motors.

        Returns:
            actual_torque: The torque that needs to be applied to the motor.
            observed_torque: The torque observed by the sensor.
        """
        if self._torque_control_enabled:
            pwm = motor_commands
        else:
            if kp is None:
                kp = np.full(self._motors_num, self._kp)
            if kd is None:
                kd = np.full(self._motors_num, self._kd)
            pwm = -1 * kp * (motor_angle - motor_commands) - kd * motor_velocity
        pwm = np.clip(pwm, -1.0, 1.0)
        return self._convert_to_torque_from_pwm(pwm, true_motor_velocity)

    def _convert_to_torque_from_pwm(self, pwm, true_motor_velocity):
        """Convert the pwm signal to torque.

        Args:
            pwm: The pulse width modulation, in [-1.0, 1.0].
            true_motor_velocity: The true motor velocity at the current
                moment, used for back EMF voltage and viscous damping.

        Returns:
            actual_torque: The torque that needs to be applied to the motor.
            observed_torque: The torque observed by the sensor.
        """
        observed_torque = np.clip(
            self._torque_constant * (np.asarray(pwm) * self._voltage / self._resistance),
            -OBSERVED_TORQUE_LIMIT, OBSERVED_TORQUE_LIMIT)
        # Net voltage is clipped at 50V by diodes on the motor controller.
        voltage_net = np.clip(
            np.asarray(pwm) * self._voltage -
            (self._torque_constant + self._viscous_damping) * np.asarray(true_motor_velocity),
            -VOLTAGE_CLIPPING, VOLTAGE_CLIPPING)
        current = voltage_net / self._resistance
        current_sign = np.sign(current)
        current_magnitude = np.absolute(current)
        # Saturate torque based on empirical current relation.
        actual_torque = np.interp(current_magnitude, self._current_table, self._torque_table)
        actual_torque = np.multiply(current_sign, actual_torque)
        actual_torque = np.multiply(self._strength_ratios, actual_torque)
        return actual_torque, observed_torque
import time
import numpy as np
from rex_gym.model.kinematics import Kinematics
class GaitPlanner:
    """Open-loop gait generator for the quadruped.

    Produces per-leg foot-frame targets by blending a flat stance
    trajectory with a Bezier-curve swing trajectory, phase-shifted per leg
    according to the selected gait mode.
    """

    def __init__(self, mode):
        self._frame = np.zeros([4, 3])
        self._phi = 0.
        self._phi_stance = 0.
        self._last_time = 0.
        self._alpha = 0.
        self._s = False
        if mode == "walk":
            # Walk: diagonal leg pairs half a cycle apart (FR, FL, BR, BL).
            self._offset = np.array([0., 0.5, 0.5, 0.])
            self.step_offset = 0.5
        else:
            self._offset = np.array([0., 0., 0.8, 0.8])
            self.step_offset = 0.5

    @staticmethod
    def solve_bin_factor(n, k):
        """Return the binomial coefficient C(n, k) as a float."""
        # Bug fix: the original used np.math.factorial; np.math was a
        # private alias of the math module and was removed in NumPy >= 1.25.
        from math import factorial
        return factorial(n) / (factorial(k) * factorial(n - k))

    def bezier_curve(self, t, k, point):
        """Evaluate the k-th degree-11 Bernstein basis term scaled by `point`."""
        n = 11
        return point * self.solve_bin_factor(n, k) * np.power(t, k) * np.power(1 - t, n - k)

    @staticmethod
    def calculate_stance(phi_st, v, angle):
        """Foot displacement during the stance phase.

        Args:
            phi_st: Stance-phase progress in [0, 1].
            v: Commanded velocity magnitude (sign is ignored).
            angle: Heading angle in degrees.

        Returns:
            (x, y, z) stance displacement.
        """
        c = np.cos(np.deg2rad(angle))
        s = np.sin(np.deg2rad(angle))
        A = 0.001  # vertical dip amplitude
        half_l = 0.05  # half step length
        p_stance = half_l * (1 - 2 * phi_st)
        stance_x = c * p_stance * np.abs(v)
        stance_y = -s * p_stance * np.abs(v)
        stance_z = -A * np.cos(np.pi / (2 * half_l) * p_stance)
        return stance_x, stance_y, stance_z

    def calculate_bezier_swing(self, phi_sw, v, angle, direction):
        """Foot displacement during the swing phase along a Bezier curve.

        Args:
            phi_sw: Swing-phase progress in [0, 1].
            v: Commanded velocity magnitude (sign is ignored).
            angle: Heading angle in degrees.
            direction: +1/-1 multiplier on the forward control points.

        Returns:
            (x, y, z) swing displacement.
        """
        c = np.cos(np.deg2rad(angle))
        s = np.sin(np.deg2rad(angle))
        X = np.abs(v) * c * np.array([-0.04, -0.056, -0.06, -0.06, -0.06, 0.,
                                      0., 0., 0.06, 0.06, 0.056, 0.04]) * direction
        Y = np.abs(v) * s * (-X)
        Z = np.abs(v) * np.array([0., 0., 0.0405, 0.0405, 0.0405, 0.0405,
                                  0.0405, 0.0495, 0.0495, 0.0495, 0., 0.])
        swing_x = 0.
        swing_y = 0.
        swing_z = 0.
        # TODO Use all 12 points. range(10) is kept deliberately: the gait
        # was tuned with the last two control points ignored, and changing
        # it would alter the trajectory.
        for i in range(10):
            swing_x = swing_x + self.bezier_curve(phi_sw, i, X[i])
            swing_y = swing_y + self.bezier_curve(phi_sw, i, Y[i])
            swing_z = swing_z + self.bezier_curve(phi_sw, i, Z[i])
        return swing_x, swing_y, swing_z

    def step_trajectory(self, phi, v, angle, w_rot, center_to_foot, direction):
        """Compute one foot's displacement for gait phase `phi`.

        Combines the longitudinal (velocity) contribution with a rotational
        (yaw-rate) contribution; updates self._alpha as a side effect of the
        swing phase.
        """
        if phi >= 1:
            phi = phi - 1.
        r = np.sqrt(center_to_foot[0] ** 2 + center_to_foot[1] ** 2)
        foot_angle = np.arctan2(center_to_foot[1], center_to_foot[0])
        # Heading of the rotational component, depending on yaw direction.
        if w_rot >= 0.:
            circle_trajectory = 90. - np.rad2deg(foot_angle - self._alpha)
        else:
            circle_trajectory = 270. - np.rad2deg(foot_angle - self._alpha)
        if phi <= self.step_offset:
            # stance phase
            phi_stance = phi / self.step_offset
            stepX_long, stepY_long, stepZ_long = self.calculate_stance(phi_stance, v, angle)
            stepX_rot, stepY_rot, stepZ_rot = self.calculate_stance(phi_stance, w_rot, circle_trajectory)
        else:
            # swing phase
            phiSwing = (phi - self.step_offset) / (1 - self.step_offset)
            stepX_long, stepY_long, stepZ_long = self.calculate_bezier_swing(phiSwing, v, angle, direction)
            stepX_rot, stepY_rot, stepZ_rot = self.calculate_bezier_swing(phiSwing, w_rot, circle_trajectory, direction)
            # Update the rotational phase angle; sign depends on which side
            # of the body the foot is on and the direction of motion.
            if center_to_foot[1] > 0:
                if stepX_rot < 0:
                    self._alpha = -np.arctan2(np.sqrt(stepX_rot ** 2 + stepY_rot ** 2), r)
                else:
                    self._alpha = np.arctan2(np.sqrt(stepX_rot ** 2 + stepY_rot ** 2), r)
            else:
                if stepX_rot < 0:
                    self._alpha = np.arctan2(np.sqrt(stepX_rot ** 2 + stepY_rot ** 2), r)
                else:
                    self._alpha = -np.arctan2(np.sqrt(stepX_rot ** 2 + stepY_rot ** 2), r)
        coord = np.empty(3)
        coord[0] = stepX_long + stepX_rot
        coord[1] = stepY_long + stepY_rot
        coord[2] = stepZ_long + stepZ_rot
        return coord

    def loop(self, v, angle, w_rot, t, direction, frames=None):
        """Advance the gait clock and return the four foot-frame targets.

        Args:
            v: Commanded velocity magnitude.
            angle: Heading angle in degrees.
            w_rot: Commanded yaw rate.
            t: Gait cycle period in seconds (floored at 0.01).
            direction: +1/-1 gait direction multiplier.
            frames: Optional 4x3 matrix of rest foot positions; defaults to
                the neutral stance derived from Kinematics.

        Returns:
            4x3 matrix of foot positions (rows: FR, FL, BR, BL).
        """
        if frames is None:
            k_obj = Kinematics()
            x_dist = k_obj.x_dist
            y_dist = k_obj.y_dist
            height = k_obj.height
            frames = np.asmatrix([[x_dist / 2, -y_dist / 2, -height],
                                  [x_dist / 2, y_dist / 2, -height],
                                  [-x_dist / 2, -y_dist / 2, -height],
                                  [-x_dist / 2, y_dist / 2, -height]])
        if t <= 0.01:
            t = 0.01
        if self._phi >= 0.99:
            self._last_time = time.time()
        self._phi = (time.time() - self._last_time) / t
        # One pass per leg, in the same order as self._offset: FR, FL, BR, BL.
        for leg in range(4):
            step_coord = self.step_trajectory(self._phi + self._offset[leg], v, angle, w_rot,
                                              np.squeeze(np.asarray(frames[leg, :])), direction)
            self._frame[leg, 0] = frames[leg, 0] + step_coord[0]
            self._frame[leg, 1] = frames[leg, 1] + step_coord[1]
            self._frame[leg, 2] = frames[leg, 2] + step_coord[2]
        return self._frame
import numpy as np
class Kinematics:
    """Whole-body inverse kinematics for the quadruped.

    Holds the robot geometry (link lengths, neutral foot positions) and
    solves, for a desired body orientation/position, the hip/leg/foot joint
    angles of all four legs.
    """

    def __init__(self):
        # Geometry in meters: body length/width and per-leg link lengths.
        self._l = 0.23
        self._w = 0.075
        self._hip = 0.055
        self._leg = 0.10652
        self._foot = 0.145
        self.y_dist = 0.185
        self.x_dist = self._l
        self.height = 0.2
        # frame vectors: hip attachment points and neutral foot positions,
        # expressed in the body frame (rows: FR, FL, BR, BL).
        self._hip_front_right_v = np.array([self._l / 2, -self._w / 2, 0])
        self._hip_front_left_v = np.array([self._l / 2, self._w / 2, 0])
        self._hip_rear_right_v = np.array([-self._l / 2, -self._w / 2, 0])
        self._hip_rear_left_v = np.array([-self._l / 2, self._w / 2, 0])
        self._foot_front_right_v = np.array([self.x_dist / 2, -self.y_dist / 2, -self.height])
        self._foot_front_left_v = np.array([self.x_dist / 2, self.y_dist / 2, -self.height])
        self._foot_rear_right_v = np.array([-self.x_dist / 2, -self.y_dist / 2, -self.height])
        self._foot_rear_left_v = np.array([-self.x_dist / 2, self.y_dist / 2, -self.height])
        self._frames = np.asmatrix([[self.x_dist / 2, -self.y_dist / 2, -self.height],
                                    [self.x_dist / 2, self.y_dist / 2, -self.height],
                                    [-self.x_dist / 2, -self.y_dist / 2, -self.height],
                                    [-self.x_dist / 2, self.y_dist / 2, -self.height]])

    @staticmethod
    def get_Rx(x):
        """Homogeneous 4x4 rotation about the x-axis by angle x (radians)."""
        return np.asmatrix([[1, 0, 0, 0],
                            [0, np.cos(x), -np.sin(x), 0],
                            [0, np.sin(x), np.cos(x), 0],
                            [0, 0, 0, 1]])

    @staticmethod
    def get_Ry(y):
        """Homogeneous 4x4 rotation about the y-axis by angle y (radians)."""
        return np.asmatrix([[np.cos(y), 0, np.sin(y), 0],
                            [0, 1, 0, 0],
                            [-np.sin(y), 0, np.cos(y), 0],
                            [0, 0, 0, 1]])

    @staticmethod
    def get_Rz(z):
        """Homogeneous 4x4 rotation about the z-axis by angle z (radians)."""
        return np.asmatrix([[np.cos(z), -np.sin(z), 0, 0],
                            [np.sin(z), np.cos(z), 0, 0],
                            [0, 0, 1, 0],
                            [0, 0, 0, 1]])

    def get_Rxyz(self, x, y, z):
        """Composed rotation Rx*Ry*Rz; identity when all angles are zero."""
        if x != 0 or y != 0 or z != 0:
            R = self.get_Rx(x) * self.get_Ry(y) * self.get_Rz(z)
            return R
        else:
            return np.identity(4)

    def get_RT(self, orientation, position):
        """Homogeneous transform: rotation (roll, pitch, yaw) then translation.

        Note: returns rotation * translation, i.e. the translation is applied
        in the rotated frame.
        """
        roll = orientation[0]
        pitch = orientation[1]
        yaw = orientation[2]
        x0 = position[0]
        y0 = position[1]
        z0 = position[2]
        translation = np.asmatrix([[1, 0, 0, x0],
                                   [0, 1, 0, y0],
                                   [0, 0, 1, z0],
                                   [0, 0, 0, 1]])
        rotation = self.get_Rxyz(roll, pitch, yaw)
        return rotation * translation

    def transform(self, coord, rotation, translation):
        """Apply get_RT(rotation, translation) to a 3-vector; return a 3-vector."""
        vector = np.array([[coord[0]],
                           [coord[1]],
                           [coord[2]],
                           [1]])
        transform_vector = self.get_RT(rotation, translation) * vector
        return np.array([transform_vector[0, 0], transform_vector[1, 0], transform_vector[2, 0]])

    @staticmethod
    def check_domain(domain):
        """Clamp the acos/atan2 domain argument into (-1, 1) to avoid NaNs."""
        if domain > 1 or domain < -1:
            if domain > 1:
                domain = 0.99
            else:
                domain = -0.99
        return domain

    def _solve_IK(self, coord, hip, leg, foot, right_side):
        """Analytic 3-DOF leg IK for a single foot target in the hip frame.

        Args:
            coord: Foot position relative to the hip attachment point.
            hip, leg, foot: Link lengths of the leg chain.
            right_side: True for right legs (mirrors the hip offset sign).

        Returns:
            np.array([theta, -alpha, -gamma]): hip, upper-leg and knee angles.
        """
        # Law-of-cosines argument for the knee angle; clamped to stay valid.
        domain = (coord[1] ** 2 + (-coord[2]) ** 2 - hip ** 2 + (-coord[0]) ** 2 - leg ** 2 - foot ** 2) / (2 * foot * leg)
        domain = self.check_domain(domain)
        gamma = np.arctan2(-np.sqrt(1 - domain ** 2), domain)
        # Guard the radicand against small negative values from round-off.
        sqrt_value = coord[1] ** 2 + (-coord[2]) ** 2 - hip ** 2
        if sqrt_value < 0.0:
            sqrt_value = 0.0
        alpha = np.arctan2(-coord[0], np.sqrt(sqrt_value)) - np.arctan2(foot * np.sin(gamma), leg + foot * np.cos(gamma))
        hip_val = hip
        if right_side:
            hip_val = -hip
        theta = -np.arctan2(coord[2], coord[1]) - np.arctan2(np.sqrt(sqrt_value), hip_val)
        angles = np.array([theta, -alpha, -gamma])
        return angles

    def solve(self, orientation, position, frames=None):
        """Solve the IK of all four legs for a desired body pose.

        Args:
            orientation: Desired body (roll, pitch, yaw).
            position: Desired body translation.
            frames: Optional 4x3 matrix of foot targets; defaults to the
                stored neutral frames (and replaces them when given).

        Returns:
            Tuple of per-leg angle triplets (FR, FL, BR, BL) and the 4x3
            matrix of transformed foot positions.
        """
        if frames is not None:
            self._frames = frames
        foot_front_right = np.asarray([self._frames[0, 0], self._frames[0, 1], self._frames[0, 2]])
        foot_front_left = np.asarray([self._frames[1, 0], self._frames[1, 1], self._frames[1, 2]])
        foot_rear_right = np.asarray([self._frames[2, 0], self._frames[2, 1], self._frames[2, 2]])
        foot_rear_left = np.asarray([self._frames[3, 0], self._frames[3, 1], self._frames[3, 2]])
        # rotation vertices: hip attachment points after the body transform.
        hip_front_right_vertex = self.transform(self._hip_front_right_v, orientation, position)
        hip_front_left_vertex = self.transform(self._hip_front_left_v, orientation, position)
        hip_rear_right_vertex = self.transform(self._hip_rear_right_v, orientation, position)
        hip_rear_left_vertex = self.transform(self._hip_rear_left_v, orientation, position)
        # leg vectors: foot target relative to each (transformed) hip.
        front_right_coord = foot_front_right - hip_front_right_vertex
        front_left_coord = foot_front_left - hip_front_left_vertex
        rear_right_coord = foot_rear_right - hip_rear_right_vertex
        rear_left_coord = foot_rear_left - hip_rear_left_vertex
        # leg vectors transformation: express the leg vectors back in the
        # body frame by applying the negated pose (approximate inverse).
        inv_orientation = -orientation
        inv_position = -position
        t_front_right_coord = self.transform(front_right_coord, inv_orientation, inv_position)
        t_front_left_coord = self.transform(front_left_coord, inv_orientation, inv_position)
        t_rear_right_coord = self.transform(rear_right_coord, inv_orientation, inv_position)
        t_rear_left_coord = self.transform(rear_left_coord, inv_orientation, inv_position)
        # solve IK per leg (right legs mirror the hip offset).
        front_right_angles = self._solve_IK(t_front_right_coord, self._hip, self._leg, self._foot, True)
        front_left_angles = self._solve_IK(t_front_left_coord, self._hip, self._leg, self._foot, False)
        rear_right_angles = self._solve_IK(t_rear_right_coord, self._hip, self._leg, self._foot, True)
        rear_left_angles = self._solve_IK(t_rear_left_coord, self._hip, self._leg, self._foot, False)
        t_front_right = hip_front_right_vertex + t_front_right_coord
        t_front_left = hip_front_left_vertex + t_front_left_coord
        t_rear_right = hip_rear_right_vertex + t_rear_right_coord
        t_rear_left = hip_rear_left_vertex + t_rear_left_coord
        t_frames = np.asmatrix([[t_front_right[0], t_front_right[1], t_front_right[2]],
                                [t_front_left[0], t_front_left[1], t_front_left[2]],
                                [t_rear_right[0], t_rear_right[1], t_rear_right[2]],
                                [t_rear_left[0], t_rear_left[1], t_rear_left[2]]])
        return front_right_angles, front_left_angles, rear_right_angles, rear_left_angles, t_frames
import abc
import time
from threading import RLock
from concurrent.futures import ThreadPoolExecutor, Future, CancelledError
from typing import Callable, List, Tuple, Optional, Any, Union, Deque, Dict
from collections import deque
import traceback
import jumpy as jp
import jax.numpy as jnp # todo: replace with jumpy as jp.ndarray?
import jax.random as rnd
import numpy as onp
from flax.core import FrozenDict
from jax import jit
from rex.base import GraphState, StepState, InputState, State, Output, Params
from rex.constants import READY, RUNNING, STOPPING, STOPPED, RUNNING_STATES, PHASE, FREQUENCY, SIMULATED, \
FAST_AS_POSSIBLE, SYNC, BUFFER, DEBUG, INFO, WARN, ERROR, WALL_CLOCK, LATEST
from rex.input import Input
from rex.output import Output
from rex.utils import log
from rex.distributions import Distribution, Gaussian, GMM
import rex.proto.log_pb2 as log_pb2
class BaseNode:
def __init__(self, name: str, rate: float, delay_sim: Distribution, delay: float = None, advance: bool = True,
             stateful: bool = True, log_level: int = WARN, color: str = "green"):
    """Create a node that runs at `rate` Hz on its own single-thread executor.

    Args:
        name: Unique node name (also used as the executor thread prefix).
        rate: Step frequency in Hz.
        delay_sim: Distribution of the simulated computation delay.
        delay: Fixed (wall-clock) computation delay; may be None.
        advance: Whether the node may advance as soon as inputs are ready.
        stateful: Whether the node carries state across steps.
        log_level: Minimum level for this node's log output.
        color: Color used for this node's log output.
    """
    self.name = name
    self.rate = rate
    self.log_level = log_level
    self.color = color
    self.advance = advance
    self.stateful = stateful
    self.inputs: List[Input] = []
    self.output = Output(self, self.log_level, self.color, delay, delay_sim)
    # State and episode counter
    self._eps = 0
    self._state = STOPPED
    # Executor: single worker so submitted tasks run strictly in order.
    self._executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix=name)
    self._q_task: Deque[Tuple[Future, Callable, Any, Any]] = deque(maxlen=10)
    self._lock = RLock()
    # Reset every run
    self._tick = None
    self._record: log_pb2.NodeRecord = None
    self._phase_scheduled = None
    self._phase = None
    self._phase_dist = None
    self._sync = None
    self._clock = None
    self._scheduling = None
    self._real_time_factor = 1.
    # Set starting ts: a Future placeholder, resolved immediately to 0 here
    # and re-created on every reset.
    self._ts_start = Future()
    self._set_ts_start(0.)
    self.q_tick: Deque[int] = None
    self.q_ts_scheduled: Deque[Tuple[int, float]] = None
    self.q_ts_output_prev: Deque[float] = None
    self.q_ts_step: Deque[Tuple[int, float, float, log_pb2.StepRecord]] = None
    self.q_rng_step: Deque[jnp.ndarray] = None
    # Only used if no step and reset fn are provided
    self._i = 0
    # Warn when the sampling period cannot absorb the output phase shift.
    if not 1/rate > self.output.phase:
        self.log("WARNING", f"The sampling time ({1/rate=:.6f} s) is smaller than"
                            f" the output phase ({self.output.phase=:.6f} s)."
                            " This may lead to large (accumulating) delays.", WARN)
def warmup(self):
    """Pre-compile (warm up) the jitted functions of the output and inputs."""
    self.output.warmup()
    for inp in self.inputs:
        inp.warmup()
@property
def record(self) -> log_pb2.NodeRecord:
    """The node's episode record; built lazily from `info` until a run sets it."""
    if self._record is not None:
        return self._record
    record = log_pb2.NodeRecord(info=self.info)
    record.inputs.extend([log_pb2.InputRecord(info=i.info) for i in self.inputs])
    return record
@property
def eps(self) -> int:
    """The current episode counter (incremented on every reset)."""
    return self._eps
@property
def phase(self) -> float:
    """Phase shift of the node: max phase over all incoming blocking & non-skipped connections."""
    # Recalculate phase once per episode; _reset caches it in self._phase.
    if self._phase is None:
        try:
            # Recursive: i.phase queries the upstream node's phase, so a
            # cycle of blocking, non-skipped connections recurses forever.
            return max([0.] + [i.phase for i in self.inputs if i.blocking and not i.skip])
        except RecursionError as e:
            msg = "The constructed graph is not DAG. To break an algebraic loop, " \
                  "either skip a connection or make the connection non-blocking."
            log(self.name, "red", ERROR, "ERROR", msg)
            raise e
    else:
        return self._phase
@property
def phase_dist(self) -> Distribution:
    """Distribution of the node's phase; a point-mass Gaussian until a run sets it."""
    return Gaussian(self.phase) if self._phase_dist is None else self._phase_dist
@property
def info(self) -> log_pb2.NodeInfo:
    """Serialize this node's static configuration (and its inputs') to proto."""
    info = log_pb2.NodeInfo(name=self.name, rate=self.rate, stateful=self.stateful, advance=self.advance, phase=self.phase,
                            delay_sim=self.output.delay_sim.info, delay=self.output.delay)
    info.inputs.extend([i.info for i in self.inputs])
    return info
@classmethod
def from_info(cls, info: log_pb2.NodeInfo, log_level: int = WARN, color: str = "green", **kwargs):
    """Alternate constructor: rebuild a node from a NodeInfo proto log.

    Note: inputs are not reconnected here; use connect_from_info for that.
    """
    node = cls(name=info.name, rate=info.rate, delay_sim=GMM.from_info(info.delay_sim), delay=info.delay, advance=info.advance,
               stateful=info.stateful, log_level=log_level, color=color, **kwargs)
    return node
def connect_from_info(self, info: log_pb2.InputInfo, node: "Node", log_level: Optional[int] = None, color: Optional[str] = None):
    """Reconnect this node to `node` using a recorded InputInfo proto.

    Args:
        info: Recorded connection parameters (blocking, skip, delays, ...).
        node: The upstream node to connect from.
        log_level: Optional override for the connection's log level.
        color: Optional override for the connection's log color.
    """
    self.connect(node,
                 blocking=info.blocking,
                 skip=info.skip,
                 delay_sim=GMM.from_info(info.delay_sim),
                 delay=info.delay,
                 jitter=info.jitter,
                 name=info.name,
                 color=color,
                 log_level=log_level)
def log(self, id: str, value: Optional[Any] = None, log_level: Optional[int] = None):
    """Log `value` under identifier `id`, never above the node's own log level."""
    effective_level = log_level if isinstance(log_level, int) else self.log_level
    log(self.name, self.color, min(effective_level, self.log_level), id, value)
def _set_ts_start(self, ts_start: float):
    """Resolve the pending start-timestamp Future, then replace it by the float.

    Waiters blocked on the Future are released first; subsequent readers see
    the plain float (now()/throttle handle both forms).
    """
    assert isinstance(self._ts_start, Future)
    self._ts_start.set_result(ts_start)
    self._ts_start = ts_start
def _submit(self, fn, *args, stopping: bool = False, **kwargs):
    """Submit `fn` to the node's single-thread executor, state permitting.

    Tasks are only accepted while the node is READY or RUNNING, unless
    `stopping` is set (used to run shutdown work after leaving RUNNING).

    Returns:
        The task's Future; a pre-cancelled Future if the task was refused.
    """
    with self._lock:
        if self._state in [READY, RUNNING] or stopping:
            f = self._executor.submit(fn, *args, **kwargs)
            self._q_task.append((f, fn, args, kwargs))
            f.add_done_callback(self._f_callback)
        else:
            self.log("SKIPPED", fn.__name__, log_level=DEBUG)
            f = Future()
            f.cancel()
        return f
def _f_callback(self, f: Future):
    """Done-callback for submitted tasks: log the traceback of failed tasks.

    Bug fix: the original tested `e is not CancelledError`, comparing an
    exception *instance* against the *class* (always True), so in-task
    cancellations were logged as errors. Additionally, `Future.exception()`
    raises CancelledError when the future itself was cancelled, so that case
    must be checked first.
    """
    if f.cancelled():
        return
    e = f.exception()
    if e is not None and not isinstance(e, CancelledError):
        error_msg = "".join(traceback.format_exception(None, e, e.__traceback__))
        log(self.name, "red", ERROR, "ERROR", error_msg)
def now(self) -> Tuple[float, float]:
    """Return (simulated, wall-clock) time passed since the episode start."""
    # The start timestamp may still be a pending Future early in a reset.
    ts_start = self._ts_start
    if isinstance(ts_start, Future):
        ts_start = ts_start.result()
    wc_passed = time.time() - ts_start
    factor = self._real_time_factor
    sc = wc_passed if factor == 0 else wc_passed * factor
    return sc, wc_passed
def throttle(self, ts: float):
    """Sleep until wall-clock time catches up with simulated time `ts`.

    No-op when running FAST_AS_POSSIBLE (no real-time pacing).
    """
    if self._real_time_factor not in [FAST_AS_POSSIBLE]:
        # Determine starting timestamp (may still be a pending Future).
        ts_start = self._ts_start
        ts_start = ts_start.result() if isinstance(ts_start, Future) else ts_start
        # Target wall-clock elapsed time for the given simulated time.
        wc_passed_target = ts / self._real_time_factor
        wc_passed = time.time() - ts_start
        wc_sleep = max(0., wc_passed_target-wc_passed)
        time.sleep(wc_sleep)
def connect(self, node: "Node", blocking: bool, delay_sim: Distribution, delay: float = None, window: int = 1, skip: bool = False,
            jitter: int = LATEST, name: Optional[str] = None, log_level: Optional[int] = None, color: Optional[str] = None):
    """Register the output of `node` as a new input channel of this node.

    Falls back to this node's log_level/color and the source output's name when
    no overrides are given, then cross-registers the Input with the upstream
    output so messages can be pushed.
    """
    # Each upstream output may feed at most one input of this node.
    connected = [inp.output.node.name for inp in self.inputs]
    assert node.name not in connected, "Cannot use the same output source for more than one input."
    # Resolve per-connection overrides against node-level defaults.
    if not isinstance(log_level, int):
        log_level = self.log_level
    if not isinstance(color, str):
        color = self.color
    if not isinstance(name, str):
        name = node.output.name
    new_input = Input(self, node.output, window, blocking, skip, jitter, delay, delay_sim, log_level, color, name)
    self.inputs.append(new_input)
    # Make the upstream output aware of this input.
    node.output.connect(new_input)
@abc.abstractmethod
def step(self, ts: jp.float32, step_state: StepState) -> Tuple[StepState, Output]:
    """Abstract: advance the node one tick at simulated time `ts`.

    Must return a tuple of (updated step state, output message to publish).
    """
    raise NotImplementedError
def _reset(self, graph_state: GraphState, sync: int = SYNC, clock: int = SIMULATED, scheduling: int = PHASE, real_time_factor: Union[int, float] = FAST_AS_POSSIBLE):
    """Prepare the node for a new episode.

    Stores the run configuration, bumps the episode counter, clears all
    scheduling queues, and re-seeds/resets the output and every input channel.
    Leaves the node in the READY state.

    :param graph_state: Graph state; this node's StepState is looked up under its name.
    :param sync: SYNC or ASYNC stepping; SYNC requires clock=SIMULATED.
    :param clock: SIMULATED or WALL_CLOCK time source.
    :param scheduling: PHASE or FREQUENCY step-scheduling policy.
    :param real_time_factor: Simulation speed w.r.t. the wall clock
        (FAST_AS_POSSIBLE disables throttling).
    """
    assert self._state in [STOPPED, READY], f"{self.name} must first be stopped, before it can be reset"
    assert not (clock in [WALL_CLOCK] and sync in [SYNC]), "You can only simulate synchronously, if the clock=`SIMULATED`."
    # Save run configuration
    self._sync = sync  #: True if we must run synchronized
    self._clock = clock  #: Simulate timesteps
    self._scheduling = scheduling  #: Synchronization mode for step scheduling
    self._real_time_factor = real_time_factor  #: Scaling of simulation speed w.r.t wall clock
    # Up the episode counter (must happen before resetting outputs & inputs)
    self._eps += 1
    # Reset every run
    self._tick = 0
    self._phase_scheduled = 0.  #: Structural phase shift that the step scheduler takes into account
    # Cleared first, then re-read via the properties — presumably the `phase` /
    # `phase_dist` properties recompute when the cached fields are None (TODO confirm).
    self._phase, self._phase_dist = None, None
    self._phase = self.phase
    self._phase_dist = self.phase_dist
    self._record = None
    self._step_state = graph_state.nodes[self.name]
    # Set starting ts
    self._ts_start = Future()  #: The starting timestamp of the episode.
    # Initialize empty queues
    self.q_tick = deque()
    self.q_ts_scheduled = deque()
    self.q_ts_output_prev = deque()
    self.q_ts_step = deque()
    self.q_rng_step = deque()
    # Get rng for delay sampling
    # This is hacky because we reuse the seed.
    # However, changing the seed of the step_state would break the reproducibility between graphs (compiled, async).
    rng = self._step_state.rng
    rng = rnd.PRNGKey(rng[0]) if isinstance(rng, onp.ndarray) else rng
    # Reset output
    rng_out, rng = rnd.split(rng, num=2)
    self.output.reset(rng_out)
    # Reset all inputs and output
    rngs_in = rnd.split(rng, num=len(self.inputs))
    [i.reset(r, self._step_state.inputs[i.input_name]) for r, i in zip(rngs_in, self.inputs)]
    # Set running state
    self._state = READY
    self.log(RUNNING_STATES[self._state], log_level=DEBUG)
def _start(self, start: float):
    """Start the episode: mark the node RUNNING, resolve the start timestamp,
    create the logging record, start all I/O channels, and kick off scheduling.

    :param start: Wall-clock timestamp at which the episode starts.
    """
    assert self._state in [READY], f"{self.name} must first be reset, before it can start running."
    # Set running state
    self._state = RUNNING
    self.log(RUNNING_STATES[self._state], log_level=DEBUG)
    # Resolve the start-time Future, then create the logging record
    self._set_ts_start(start)
    self._record = log_pb2.NodeRecord(info=self.info, sync=self._sync, clock=self._clock, scheduling=self._scheduling,
                                      real_time_factor=self._real_time_factor, ts_start=start)
    # Start all inputs and output
    [i.start(record=self._record.inputs.add()) for i in self.inputs]
    self.output.start()
    # Set first last_output_ts equal to phase (as if we just finished our previous output).
    self.q_ts_output_prev.append(0.)
    # Queue first two ticks (so that output_ts runs ahead of message)
    # The number of tokens > 1 determines "how far" into the future the
    # output timestamps are simulated when clock=simulated.
    self.q_tick.extend((True, True))
    # Push scheduled ts
    _f = self._submit(self.push_scheduled_ts)
def _stop(self, timeout: Optional[float] = None) -> Future:
    """Stop the episode.

    Flips the state to STOPPING under the lock (so _submit rejects new work),
    then submits a shutdown task that stops the output, drains every input
    channel, and finally marks the node STOPPED.

    :param timeout: Per-input timeout (seconds) when draining input channels.
    :return: Future that resolves once the node has fully stopped (already
        resolved if the node was not running).
    """
    # Pass here, if we are not running
    if self._state not in [RUNNING]:
        self.log(f"{self.name} is not running, so it cannot be stopped.", log_level=DEBUG)
        f = Future()
        f.set_result(None)
        return f
    assert self._state in [RUNNING], f"Cannot stop, because {self.name} is currently not running."

    def _stopping():
        # Stop producing messages and communicate total number of sent messages
        self.output.stop()
        # Stop all channels to receive all sent messages from their connected outputs
        [i.stop().result(timeout=timeout) for i in self.inputs]
        # Set running state
        self._state = STOPPED
        self.log(RUNNING_STATES[self._state], log_level=DEBUG)

    with self._lock:
        # Flip running state so that no more regular tasks can be scheduled;
        # from here on only stopping=True submissions are accepted.
        self._state = STOPPING
        self.log(RUNNING_STATES[self._state], log_level=DEBUG)
        # Submit the _stopping task (must happen while holding the lock so no
        # regular task can sneak in between the state flip and this submit).
        f = self._submit(_stopping, stopping=True)
    return f
# NOTE: effectively runs serially per node (was previously guarded by @synchronized(RLock())).
def push_scheduled_ts(self):
    """Consume one tick token and schedule the corresponding step timestamp.

    Computes the zero-delay scheduled timestamp for the next tick, queues it,
    triggers push_phase_shift, and notifies all *blocking* inputs so they can
    determine their expected message arrival times.
    """
    # Only run if there are elements in q_tick
    has_tick = len(self.q_tick) > 0
    if has_tick:
        # Remove token from tick queue (not used)
        _ = self.q_tick.popleft()
        # Determine tick and increment
        tick = self._tick
        self._tick += 1
        # Calculate scheduled ts
        # Is unaffected by scheduling delays, i.e. assumes the zero-delay situation.
        scheduled_ts = round(tick / self.rate + self.phase, 6)
        # Log
        self.log("push_scheduled_ts", f"tick={tick} | scheduled_ts={scheduled_ts: .2f}", log_level=DEBUG)
        # Queue expected next step ts and wait for blocking delays to be determined
        self.q_ts_scheduled.append((tick, scheduled_ts))
        self.push_phase_shift()
        # Push next step ts event to blocking connections (does not throttle)
        for i in self.inputs:
            if not i.blocking:
                continue
            i.q_ts_next_step.append((tick, scheduled_ts))
            # Push expect (must be called from input thread)
            i._submit(i.push_expected_blocking)
# NOTE: effectively runs serially per node (was previously guarded by @synchronized(RLock())).
def push_phase_shift(self):
    """Combine blocking-input delays, the previous output timestamp, and the
    structural scheduling shift into the actual step timestamp (ts_step).

    Runs only once all preconditions are queued. When clock=SIMULATED the
    output timestamp is pre-simulated here (and the next tick scheduled);
    otherwise it is produced in push_step after the step has actually run.
    """
    # If all blocking delays are known, and we know the expected next step timestamp
    has_all_ts_max = all([len(i.q_ts_max) > 0 for i in self.inputs if i.blocking])
    has_scheduled_ts = len(self.q_ts_scheduled) > 0
    has_last_output_ts = len(self.q_ts_output_prev) > 0
    if has_scheduled_ts and has_last_output_ts and has_all_ts_max:
        self.log("push_phase_shift", log_level=DEBUG)
        # Grab blocking delays from queues and calculate max delay
        ts_max = [i.q_ts_max.popleft() for i in self.inputs if i.blocking]
        ts_max = max(ts_max) if len(ts_max) > 0 else 0.
        # Grab next scheduled step ts (without considering phase_scheduling shift)
        tick, ts_scheduled = self.q_ts_scheduled.popleft()
        # Grab previous output ts
        ts_output_prev = self.q_ts_output_prev.popleft()
        # Calculate sources of phase shift
        only_blocking = self.advance and all(i.blocking for i in self.inputs)
        phase_inputs = ts_max - ts_scheduled
        phase_last = ts_output_prev - ts_scheduled
        phase_scheduled = self._phase_scheduled
        # Calculate phase shift
        # If only blocking connections, phase is not determined by phase_scheduled
        phase = max(phase_inputs, phase_last) if only_blocking else max(phase_inputs, phase_last, phase_scheduled)
        # Update structural scheduling phase shift
        if self._scheduling in [FREQUENCY]:
            self._phase_scheduled += max(0, phase_last-phase_scheduled)
        else:  # self._scheduling in [PHASE]
            self._phase_scheduled = 0.
        # Calculate starting timestamp for the step call
        ts_step = ts_scheduled + phase
        # Sample delay if we simulate the clock
        delay = self.output.sample_delay() if self._clock in [SIMULATED] else None
        # Create step record
        record_step = log_pb2.StepRecord(tick=tick, ts_scheduled=ts_scheduled, ts_max=ts_max, ts_output_prev=ts_output_prev,
                                         ts_step=ts_step, phase=phase, phase_scheduled=phase_scheduled,
                                         phase_inputs=phase_inputs, phase_last=phase_last)
        self.q_ts_step.append((tick, ts_step, delay, record_step))
        # Predetermine output timestamp when we simulate the clock
        if self._clock in [SIMULATED]:
            # Determine output timestamp
            ts_output = ts_step+delay
            _, ts_output_wc = self.now()
            header = log_pb2.Header(eps=self._eps, seq=tick, ts=log_pb2.Time(sc=ts_output, wc=ts_output_wc))
            self.output.push_ts_output(ts_output, header)
            # Add previous output timestamp to queue
            self.q_ts_output_prev.append(ts_output)
            # Simulate output timestamps into the future
            # If we use the wall-clock, ts_output_prev is queued after the step in push_step
            _f = self._submit(self.push_scheduled_ts)
        # Only throttle if we have non-blocking connections
        if any(not i.blocking for i in self.inputs) or not self.advance:
            # todo: This also throttles when running synced. Correct?
            self.throttle(ts_step)
        # Push for step (will never trigger here if there are non-blocking connections).
        self.push_step()
        # Push next step timestamp to non-blocking connections
        for i in self.inputs:
            if i.blocking:
                continue
            i.q_ts_next_step.append((tick, ts_step))
            # Push expect (must be called from input thread)
            i._submit(i.push_expected_nonblocking)
# NOTE: effectively runs serially per node (was previously guarded by @synchronized(RLock())).
def push_step(self):
    """Run the user-defined step once a step timestamp and one grouped message
    set per input are available; then publish the output, log the step record,
    and schedule the next tick (while still RUNNING)."""
    has_grouped = all([len(i.q_grouped) > 0 for i in self.inputs])
    has_ts_step = len(self.q_ts_step) > 0
    if has_ts_step and has_grouped:
        self.log("push_step", log_level=DEBUG)
        # Grab next expected step ts and step record
        tick, ts_step_sc, delay_sc, record_step = self.q_ts_step.popleft()
        # Actual step start ts
        # todo: ts_step_wc should also be inferred in push_phase_shift when running ASYNC (using wall clock).
        _, ts_step_wc = self.now()
        # Grab grouped msgs
        inputs = FrozenDict({i.input_name: i.q_grouped.popleft() for i in self.inputs})
        # Update StepState with grouped messages
        step_state = self._step_state.replace(inputs=inputs)
        # Run step and get msg
        new_step_state, output = self.step(jp.float32(ts_step_sc), step_state)
        # Update step_state
        self._step_state = new_step_state
        # Determine output timestamp
        if self._clock in [SIMULATED]:
            # Delay was pre-sampled in push_phase_shift.
            assert delay_sc is not None
            ts_output_sc = ts_step_sc + delay_sc
            _, ts_output_wc = self.now()
        else:
            assert delay_sc is None
            ts_output_sc, ts_output_wc = self.now()
            delay_sc = ts_output_sc - ts_step_sc
            assert delay_sc >= 0, "delay cannot be negative"
            # Add previous output timestamp to queue
            # If we simulate the clock, ts_output_prev is already queued in push_phase_shift
            self.q_ts_output_prev.append(ts_output_sc)
        # Throttle to timestamp
        self.throttle(ts_output_sc)
        # Create header with timing information on output
        header = log_pb2.Header(eps=self._eps, seq=tick, ts=log_pb2.Time(sc=ts_output_sc, wc=ts_output_wc))
        # Log sent times
        record_step.sent.CopyFrom(header)
        record_step.delay = delay_sc
        record_step.ts_output = ts_output_sc
        record_step.comp_delay.CopyFrom(log_pb2.Time(sc=ts_output_sc-ts_step_sc, wc=ts_output_wc-ts_step_wc))
        # Push output
        if output is not None:  # Agent returns None when we are stopping/resetting.
            self.output.push_output(output, header)
        # Add step record
        self._record.steps.append(record_step)
        # Only schedule next step if we are running
        if self._state in [RUNNING]:
            # Add token to tick queue (ticks are incremented in push_scheduled_ts function)
            self.q_tick.append(True)
            # Schedule next step (does not consider scheduling shifts)
            _f = self._submit(self.push_scheduled_ts)
class Node(BaseNode):
    """Base class for user-defined nodes.

    Subclasses provide the `default_*` factories used to build the initial
    GraphState and implement `step()`. (Fix: the final line of this class was
    corrupted by extraction artifacts fused onto it; the stray text is removed.)
    """

    def default_params(self, rng: jp.ndarray, graph_state: GraphState = None) -> Params:
        """Default params of the node."""
        raise NotImplementedError

    def default_state(self, rng: jp.ndarray, graph_state: GraphState = None) -> State:
        """Default state of the node."""
        raise NotImplementedError

    def default_inputs(self, rng: jp.ndarray, graph_state: GraphState = None) -> FrozenDict[str, InputState]:
        """Default inputs of the node.

        Builds, per connected input, a window of placeholder messages with
        zeroed timestamps and negative sequence numbers (-window..-1), filled
        with the upstream node's default_output.
        """
        rngs = jp.random_split(rng, num=len(self.inputs))
        inputs = dict()
        for i, rng_output in zip(self.inputs, rngs):
            window = i.window
            # Negative seq numbers mark these as "before the episode started".
            seq = jp.arange(-window, 0, dtype=jp.int32)
            ts_sent = 0 * jp.arange(-window, 0, dtype=jp.float32)
            ts_recv = 0 * jp.arange(-window, 0, dtype=jp.float32)
            outputs = [i.output.node.default_output(rng_output, graph_state) for _ in range(window)]
            inputs[i.input_name] = InputState.from_outputs(seq, ts_sent, ts_recv, outputs)
        return FrozenDict(inputs)

    @abc.abstractmethod
    def default_output(self, rng: jp.ndarray, graph_state: GraphState = None) -> Output:
        """Default output of the node."""
        raise NotImplementedError

    @abc.abstractmethod
    def reset(self, rng: jp.ndarray, graph_state: GraphState = None) -> StepState:
        """Reset the node and return its initial StepState."""
        raise NotImplementedError

    @abc.abstractmethod
    def step(self, ts: jp.float32, step_state: StepState) -> Tuple[StepState, Output]:
        """Advance the node one tick; return (new step state, output message)."""
        raise NotImplementedError
from typing import Dict, List, Union, Tuple
import sys
from rex.proto import log_pb2
class RecursionDepth:
    """Context manager that temporarily raises the interpreter recursion limit.

    The limit is only ever raised (never lowered below the limit captured at
    construction time) and is restored on exit.
    """

    def __init__(self, limit):
        self.limit = limit
        # Captured once at construction; restored verbatim on __exit__.
        self.default_limit = sys.getrecursionlimit()

    def __enter__(self):
        sys.setrecursionlimit(max(self.default_limit, self.limit))

    def __exit__(self, exc_type, exc_value, exc_tb):
        sys.setrecursionlimit(self.default_limit)
class Step:
    """A single recorded step of a node; the building block used to trace the
    dependency structure of an episode (see `trace`)."""

    def __init__(self, record: log_pb2.NodeRecord, tick: int):
        """
        :param record: The node record this step belongs to.
        :param tick: The tick (index into record.steps) of this step.
        """
        self._tick = tick
        self._record = record
        self._info = record.info
        # Input records keyed by input name, and per-output window sizes.
        self._inputs = {i.info.name: i for i in self._record.inputs}
        self._window = {i.info.output: i.info.window for i in self._record.inputs}
        # Stateful nodes always depend on their previous step.
        self._stateful = record.info.stateful
        self._step = record.steps[tick]
        # These variables are set in the .reset() method.
        self._isolate = None
        self._upstream_state: log_pb2.Dependency = None
        self._upstream_inputs: Dict[str, List[log_pb2.Dependency]] = None
        self._steptrace: log_pb2.TracedStep = None
@property
def steptrace(self) -> log_pb2.TracedStep:
    """Return a deep copy of the traced step, with the state dependency (if
    any) and all recorded input dependencies appended to `upstream`."""
    assert self._steptrace is not None, "You must first trace before you can extract the steptrace."
    trace_copy = log_pb2.TracedStep()
    trace_copy.CopyFrom(self._steptrace)
    if self._upstream_state is not None:
        trace_copy.upstream.append(self._upstream_state)
    # Append every recorded input dependency as well.
    for dependencies in self._upstream_inputs.values():
        trace_copy.upstream.extend(dependencies)
    return trace_copy
def reset(self, steps: Dict[str, List["Step"]], static: bool, deps: Dict[str, List[bool]] = None, isolate: bool = True):
    """Re-initialize this step's trace state and rebuild its upstream dependencies.

    :param steps: All steps of the episode, keyed by node name.
    :param static: Whether this node's params are static (constant over time);
        allows dropping the state dependency of unused stateless steps.
    :param deps: Per-node usage masks from a previous trace pass. When given,
        excluded predecessor steps are skipped and their input dependencies
        rerouted to this step. None on the first pass.
    :param isolate: Whether this step's node is isolated in its own depth.
    """
    # Whether to isolate the step trace in a separate depth or not.
    self._isolate = isolate
    # Create step trace
    self._steptrace = log_pb2.TracedStep(used=False, stateful=self._stateful, static=static, isolate=isolate, name=self._info.name, tick=self._tick, ts_step=self._step.ts_step)
    # Determine if this step will be used (derived from a previous trace).
    is_used = deps[self._info.name][self._tick] if deps is not None else False
    # Determine previous call index that is used; collect excluded ticks in between.
    prev_tick = self._tick - 1
    excluded_ticks = []
    if is_used:
        for idx, prev_is_used in reversed(list(enumerate(deps[self._info.name][:self._tick]))):
            prev_tick = idx
            if prev_is_used:
                break
            else:
                excluded_ticks.append(idx)
    # Does this step have an upstream state dependency
    depends_on_prev_state = prev_tick >= 0
    if (not self._stateful and static and not is_used) or prev_tick in excluded_ticks:
        depends_on_prev_state = False
    # Prepare upstream state dependency
    if depends_on_prev_state:
        prev_source_ts = steps[self._info.name][prev_tick]._step.ts_output
        source = log_pb2.Source(name=self._info.name, tick=prev_tick, ts=prev_source_ts)
        target = log_pb2.Target(name=self._info.name, input_name=None, tick=self._tick, ts=self._step.ts_step)
        self._upstream_state = log_pb2.Dependency(used=False, source=source, target=target)
    else:
        self._upstream_state = None
    # Reroute upstream dependencies from previous excluded steps, so the
    # messages they received still reach this (the next used) step.
    self._upstream_inputs = {}
    for idx in reversed(excluded_ticks):
        excl_step = steps[self._info.name][idx]
        for input_name, i in excl_step._inputs.items():
            self._upstream_inputs[i.info.output] = self._upstream_inputs.get(i.info.output, [])
            for m in i.grouped[excl_step._tick].messages:
                source = log_pb2.Source(name=i.info.output, tick=m.sent.seq, ts=m.sent.ts.sc)
                rerouted = log_pb2.Target(name=self._info.name, input_name=input_name, tick=excl_step._tick, ts=m.received.ts.sc)
                target = log_pb2.Target(name=self._info.name, input_name=input_name, tick=self._tick, ts=m.received.ts.sc, rerouted=rerouted)
                d = log_pb2.Dependency(used=False, source=source, target=target)
                self._upstream_inputs[i.info.output].append(d)
    # Prepare upstream input dependencies (messages grouped for this tick)
    for input_name, i in self._inputs.items():
        self._upstream_inputs[i.info.output] = self._upstream_inputs.get(i.info.output, [])
        for m in i.grouped[self._tick].messages:
            source = log_pb2.Source(name=i.info.output, tick=m.sent.seq, ts=m.sent.ts.sc)
            # todo: Determine (efficiently) the next tick if this message is excluded.
            # Do not iterate over all deps to find the next used step call,
            # because this will be very inefficient for completely excluded nodes.
            if not is_used and deps is not None:
                rerouted = log_pb2.Target(name="REROUTED", input_name=input_name, tick=self._tick, ts=m.received.ts.sc)
            else:
                rerouted = None
            target = log_pb2.Target(name=self._info.name, input_name=input_name, tick=self._tick, ts=m.received.ts.sc, rerouted=rerouted)
            d = log_pb2.Dependency(used=False, source=source, target=target)
            self._upstream_inputs[i.info.output].append(d)
def upstream(self, steps: Dict[str, List["Step"]], index: List[int], output: str, num: int) -> int:
    """Add up to `num` upstream dependencies from `output` to the trace.

    Dependencies received right before this tick are traced directly; any
    shortfall (window larger than what arrived here) is requested recursively
    from this node's previous tick.

    :param steps: All steps of the episode, keyed by node name.
    :param index: Single-element mutable counter assigning topological indices.
    :param output: Name of the upstream output channel.
    :param num: Number of messages (window entries) still to be accounted for.
    :return: The maximum depth among the traced dependencies.
    """
    dependencies = self._upstream_inputs[output]
    depths: List[int] = [0]
    # Determine the number of dependencies to add
    num_add = min(len(dependencies), num)
    num_upstream = max(0, num - len(dependencies))
    # Add input dependencies that were received right before this tick
    assert num_add + num_upstream == num
    for idx, d in enumerate(dependencies[-num_add:]):  # Only add dependencies up to num size.
        if not d.used:  # Only add dependency if it was not previously added (if node is stateless)
            _, depth = steps[output][d.source.tick]._trace(steps, index, d)
            depths.append(depth)
    # Add upstream input dependencies that were received in previous ticks
    prev_tick = self._tick - 1
    if num_upstream > 0 and prev_tick >= 0:
        depth = steps[self._info.name][prev_tick].upstream(steps, index, output, num_upstream)
        depths.append(depth)
    return max(depths)
def _trace(self, steps: Dict[str, List["Step"]], index: List[int], dependency: log_pb2.Dependency, final: bool = False) -> Tuple[int, int]:
    """Recursively mark this step and everything it depends on as used.

    :param steps: All steps of the episode, keyed by node name.
    :param index: Single-element mutable counter assigning topological indices.
    :param dependency: The downstream dependency that requested this step.
    :param final: True only for the root call (the step being traced), which
        has no real downstream dependency to record.
    :return: Tuple of (next topological index, depth assigned to this step).
    """
    if not self._steptrace.used:
        depths: List[int] = [0]
        # Only trace previous step as dependency if stateful
        if self._upstream_state is not None:
            if not self._upstream_state.used:
                _, depth = steps[self._info.name][self._upstream_state.source.tick]._trace(steps, index, self._upstream_state)
                depths.append(depth)
        # Trace inputs
        # Note: should happen after having added the state dependency, else stateless nodes may have their steps
        # scheduled in non-chronological order, which in turn will mess up the circular buffers.
        for output_name, dependencies in self._upstream_inputs.items():
            w = self._window[output_name]
            depth = self.upstream(steps, index, output_name, w)
            depths.append(depth)
        self._steptrace.used = True
        self._steptrace.index = index[0]
        # Isolated steps are moved to their own depth later; offset here.
        self._steptrace.depth = max(depths) + 1 - int(self._isolate)
        index[0] += 1
    if not final:
        assert not dependency.used, "Cannot add a dependency that was previously added."
        source = dependency.source
        # Sanity-check that the requesting dependency points at exactly this step.
        assert source.name == self._info.name, f"Names do not match: {source.name=} vs {self._info.name=}."
        assert source.tick == self._tick, f"Ticks do not match: {source.tick=} vs {self._tick=}."
        assert source.ts == self._step.ts_output, f"Timestamps do not match: {source.ts=} vs {self._step.ts_output=}."
        dependency.used = True
        self._steptrace.downstream.append(dependency)
    return index[0], self._steptrace.depth
def trace(self, steps: Dict[str, List["Step"]], static: Union[bool, Dict[str, bool]] = False, isolate: bool = True) -> log_pb2.TraceRecord:
    """Trace the dependency graph leading to this step and return a TraceRecord.

    Runs two tracing passes (the first discovers which steps are used; the
    second, given those usage masks, respects chronological order), then sorts
    the used steps into depths, optionally isolates this node's steps into
    their own depths, validates the schedule, and assembles the TraceRecord.

    :param steps: All steps of the episode, keyed by node name.
    :param static: Whether params are static, globally or per node name.
    :param isolate: Whether to put each of this node's steps in its own depth.
    """
    if isinstance(static, bool):
        static = {s: static for s in steps}
    # Prepare steps
    num_steps = sum([len(u) for _, u in steps.items()])
    # Initial trace (with static=True), does not respect chronological order.
    isolate = {name: isolate if name == self._info.name else False for name in steps}
    [[s.reset(steps, static=static.get(s._info.name, False), isolate=isolate.get(s._info.name)) for s in lst] for n, lst in steps.items()]
    with RecursionDepth(num_steps):
        _end, _end_depth = self._trace(steps, [0], log_pb2.Dependency(used=True), final=True)
    # Get dependencies (usage mask per node) from the first pass
    deps = {name: [s._steptrace.used for s in lst] for name, lst in steps.items()}
    # Re-trace (with static=Optional[True]), does respect chronological order if we provide deps.
    [[s.reset(steps, static=static.get(s._info.name, False), deps=deps, isolate=isolate.get(s._info.name)) for s in lst] for _, lst in steps.items()]
    with RecursionDepth(num_steps):
        end, end_depth = self._trace(steps, [0], log_pb2.Dependency(used=True), final=True)
    # Gather traceback
    traceback = log_pb2.TraceRecord()
    # Store step from which we started tracing.
    traceback.trace.CopyFrom(self._step)
    # Sort the traced steps
    use: List[log_pb2.TracedStep] = end * [None]
    excluded = []
    pruned = [name for name in steps]
    depths = [[] for i in range(0, 1+_end_depth)]
    for _name, lst in steps.items():
        for step in lst:
            s = step._steptrace
            if s.used:
                # Remove name from pruned list if it was used.
                if s.name in pruned:
                    pruned.remove(s.name)
                # Check validity
                for d in s.downstream:
                    assert d.used, "Downstream dependency must be used."
                assert s.index < end, "Index violates the bounds"
                assert use[s.index] is None, "Duplicate index"
                # Add to use list at right index
                # Makes a copy below, so changes later on to _steptrace.downstream will not get through.
                use[s.index] = step.steptrace
                depths[s.depth].append(use[s.index])  # NOTE! This connects the steptraces in use and depths
            else:
                assert all([not d.used for d in s.upstream]), "Excluded steptrace must not have used upstream dependencies."
                assert len(s.downstream) == 0, "Removed steps cannot have downstream dependencies"
                excluded.append(step.steptrace)
    # Puts isolated steps in their own depth
    new_depths = []
    for i, depth in enumerate(depths):
        if len(depth) == 0:
            assert i == 0, "Depth must be 0 if empty"
            continue
        # Check if we have isolated steps
        isolated_steps = [u for u in depth if u.isolate]
        other_steps = [u for u in depth if not u.isolate]
        # First add the other steps
        if len(other_steps) > 0:
            new_depths.append(other_steps)
        # Then add the isolated steps (if any, in chronological order)
        isolated_steps.sort(key=lambda u: u.index)
        for u in isolated_steps:
            new_depths.append([u])
    # Update depth and topological indices
    consecutive = 0
    max_consecutive = 0
    topological_index = 0
    for i, depth in enumerate(new_depths):
        # Update depths (takes the isolated depth offset into account)
        for u in depth:
            u.depth = i
            u.index = topological_index
            topological_index += 1
        # Count max sequential steps without an isolated step
        has_isolated = any([u.isolate for u in depth])
        if has_isolated:
            max_consecutive = max(max_consecutive, consecutive)
            consecutive = 0
        else:
            consecutive += 1
    # Update depths to be new_depths (which takes isolated steps into account)
    depths = new_depths
    # Sort used steps by updated index
    use.sort(key=lambda u: u.index)
    # Check validity (according to depth)
    monotone_ticks = {name: -1 for name in steps}
    for i, depth in enumerate(depths):
        # Non-isolated nodes may appear at most once per depth.
        names = [u.name for u in depth if not isolate.get(u.name)]
        assert len(names) == len(set(names)), f"Duplicate names in depth {i}: {names}"
        for u in depth:
            # Verify that all upstream dependencies are evaluated before this step
            for d in u.upstream:
                if d.used:
                    _has_run = False
                    for depthr in reversed(depths[:i]):
                        for ur in depthr:
                            if ur.name == d.source.name and ur.tick == d.source.tick:
                                _has_run = True
                                break
                        if _has_run:
                            break
                    assert _has_run, f"Upstream dependency {d.source.name} {d.source.tick} not found in previous depths."
            # Verify that all downstream dependencies are evaluated after this step
            for d in u.downstream:
                assert d.used, "Downstream dependency must be used."
                _has_run = False
                for depthr in depths[i+1:]:
                    for ur in depthr:
                        if ur.name == d.target.name and ur.tick == d.target.tick:
                            _has_run = True
                            break
                    if _has_run:
                        break
            assert monotone_ticks[u.name] < u.tick, f"Steps of node `{u.name}` are scheduled in non-chronological order."
            monotone_ticks[u.name] = u.tick
    # Check validity (according to topological order)
    monotone_ticks = {name: -1 for name in steps}
    for i, u in enumerate(use):
        assert u is not None, f"Missing step in used trace for index {i}"
        # Verify that all upstream dependencies are evaluated before this step
        for d in u.upstream:
            if d.used:
                _has_run = False
                for ur in reversed(use[:i]):
                    if ur.name == d.source.name and ur.tick == d.source.tick:
                        _has_run = True
                        break
                assert _has_run, f"Upstream dependency {d.source.name} {d.source.tick} not evaluated before {u.name} {u.tick}."
        # Verify that all downstream dependencies are evaluated after this step
        for d in u.downstream:
            assert d.used, "Downstream dependency must be used."
            _has_run = False
            for ur in use[i:]:
                if ur.name == d.target.name and ur.tick == d.target.tick:
                    _has_run = True
                    break
            assert _has_run, f"Downstream dependency {d.target.name} {d.target.tick} not evaluated after {u.name} {u.tick}."
        # Check that ticks of step traces are monotonically increasing per node
        assert monotone_ticks[u.name] < u.tick, f"Steps of node `{u.name}` are scheduled in non-chronological order."
        monotone_ticks[u.name] = u.tick
    # Add to traceback
    traceback.name = self._info.name
    traceback.max_depth = depths[-1][0].depth
    traceback.isolate = any(isolate.values())
    traceback.max_consecutive = max_consecutive
    traceback.pruned.extend(pruned)
    traceback.used.extend(use)
    traceback.excluded.extend(excluded)
    return traceback
def trace(record: log_pb2.EpisodeRecord, name: str, tick: int = -1, static: Union[bool, Dict[str, bool]] = False, verbose: bool = True, isolate: bool = True) -> log_pb2.TraceRecord:
    """Trace a step in the episode record.

    Fixes: the final line was corrupted by extraction artifacts fused onto it
    (removed); the step-building lambda shadowed the outer `record`/`tick`
    names (inlined); and the first `num_isolated` binding (a *node* count) was
    silently overwritten by a *step* count (first binding renamed).

    :param record: The episode record to trace.
    :param name: The name of the node to trace.
    :param tick: The tick of the step to trace (negative values count from the end).
    :param static: Whether to trace static dependencies. If a dictionary is passed, it is used to specify the static dependencies per node.
    :param verbose: Whether to print a one-line summary of the trace.
    :param isolate: Whether to isolate the traced node in a separate depth.
    :return: TraceRecord with the used/excluded steps and the full episode attached.
    """
    # Build one Step wrapper per recorded step, keyed by node name.
    steps: Dict[str, List["Step"]] = {n.info.name: [Step(n, tick=t) for t in range(len(n.steps))] for n in record.node}
    # Wrap negative ticks (e.g. -1 selects the node's last step).
    tick = tick % len(steps[name])
    record_trace = steps[name][tick].trace(steps, static=static, isolate=isolate)
    # Add node info
    record_trace.node.extend([n.info for n in record.node])
    record_trace.episode.CopyFrom(record)
    # Analyze traced steps
    if verbose:
        # todo: analyze per node how many steps are used and dependencies are required.
        max_consecutive = record_trace.max_consecutive
        max_depth = max([u.depth for u in record_trace.used])
        num_nodes = len(record.node)
        num_used_nodes = num_nodes - len(record_trace.pruned)
        num_isolated_nodes = int(isolate)  # number of isolated *nodes* (0 or 1)
        num_other = num_used_nodes - num_isolated_nodes
        num_used = len(record_trace.used)
        num_excluded = len(record_trace.excluded)
        num_all = num_used + num_excluded
        deps_used = 0
        deps_excluded = 0
        depths = [[] for _ in range(max_depth + 1)]
        num_depths = len(depths)
        num_isolated = tick + 1 if isolate else 0  # number of isolated *steps*
        for t in record_trace.used:
            # Count used & excluded dependencies
            for d in t.downstream:
                if d.used:
                    deps_used += 1
            for d in t.upstream:
                if not d.used:
                    deps_excluded += 1
        for t in record_trace.excluded:
            for d in t.upstream:
                if not d.used:
                    deps_excluded += 1
            deps_used += len(t.downstream)
        deps_all = deps_used + deps_excluded
        seq_steps = f"{num_used}/{num_all}"
        seq_deps = f"{deps_used}/{deps_all}"
        batch_steps = f"{num_isolated + (num_depths-num_isolated) * num_other}/{num_all}"
        if isolate:
            vec_steps = f"{num_isolated + (num_isolated-1)*max_consecutive * num_other}/{num_all} ({max_consecutive=})"
        else:
            vec_steps = "N/A"
        # NOTE: for batch and vec, the number of steps means step calls. Not to be confused with the number of environment steps.
        print(f"Trace | {name=} | {tick=} | deps (seq): {seq_deps} | step (seq): {seq_steps} | steps (batch): {batch_steps} | steps (vec): {vec_steps}")
    return record_trace
from rex.proto import log_pb2
from typing import Union, List, Tuple
import jax.numpy as jnp  # todo: replace with from brax import jumpy as jp.ndarray?
from tensorflow_probability.substrates import jax as tfp  # tensorflow_probability with jax backend

# Shorthand for the tfp distributions namespace used throughout this module.
tfd = tfp.distributions
class Gaussian:
    """A (truncated) Gaussian delay distribution.

    For var > 0 the distribution is a TruncatedNormal clipped to the
    [percentile, 1-percentile] quantile range of the untruncated normal; for
    var == 0 it degenerates to a Deterministic point mass at `mean`.
    """

    def __init__(self, mean: float, var: float = 0, percentile: float = 0.01):
        """
        :param mean: Mean of the distribution (non-negative).
        :param var: Variance (0 yields a deterministic distribution).
        :param percentile: Tail mass truncated on each side (must be > 0).
        """
        assert mean >= 0, "Mean must be non-negative"
        assert var >= 0, "var must be non-negative"
        assert percentile > 0, "There must be a truncating percentile > 0."
        self._mean = mean
        self._var = var
        self._std = var ** (1/2)  # NOTE(review): computed but never used — see note below
        self._percentile = percentile
        # NOTE(review): tfd's `scale` argument is a *standard deviation*, yet `var`
        # (a variance) is passed here and in TruncatedNormal below, while self._std
        # goes unused. This looks like a bug, but changing it would alter all sampled
        # delays and logged quantile bounds — confirm intent before fixing.
        self._low = tfd.Normal(loc=mean, scale=var).quantile(percentile).tolist()
        self._high = tfd.Normal(loc=mean, scale=var).quantile(1-percentile).tolist()
        if var > 0:
            self._dist = tfd.TruncatedNormal(loc=mean, scale=var, low=self._low, high=self._high)
        else:
            self._dist = tfd.Deterministic(loc=mean)
        # Verify that the delay is always non-negative
        assert self._low >= 0, "Samples should always be positive."

    def __repr__(self):
        return f"Gaussian | {1.0: .2f}*N({self.mean: .4f}, {self.var: .4f}) | percentile={self.percentile}"

    def __add__(self, other: "Distribution"):
        """Summation of two distributions (i.e. the distribution of the sum of
        two independent delays). Gaussian+Gaussian stays Gaussian; Gaussian+GMM
        delegates to GMM.__add__."""
        if isinstance(other, Gaussian):
            mean = self.mean + other.mean
            var = self.var + other.var
            # Keep the most conservative (largest) truncation of the two.
            percentile = max(self.percentile, other.percentile)
            return Gaussian(mean, var, percentile=percentile)
        elif isinstance(other, GMM):
            return other + self
        else:
            raise NotImplementedError("Not yet implemented")

    def pdf(self, x: jnp.ndarray):
        """Probability density at `x`."""
        return self._dist.prob(x)

    def cdf(self, x: jnp.ndarray):
        """Cumulative probability at `x`."""
        return self._dist.cdf(x)

    def sample(self, rng: jnp.ndarray, shape: Union[int, Tuple] = None):
        """Draw sample(s) of the given shape using PRNG key `rng`."""
        if shape is None:
            shape = ()
        return self._dist.sample(sample_shape=shape, seed=rng)

    @classmethod
    def from_info(cls, info: Union[log_pb2.GMM, log_pb2.Gaussian]):
        """Reconstruct a Gaussian from its proto log (a single-component GMM is accepted)."""
        if isinstance(info, log_pb2.GMM):
            assert len(info.gaussians) == 1, "The GMM log should only contain a single Gaussian."
            info = info.gaussians[0]
        mean, var, percentile = info.mean, info.var, info.percentile
        return cls(mean, var, percentile)

    @property
    def info(self) -> log_pb2.GMM:
        """Proto representation (as a single-component GMM with weight 1)."""
        info = log_pb2.GMM()
        g = log_pb2.Gaussian(weight=1, mean=self.mean, var=self.var, percentile=self.percentile, low=self.low, high=self.high)
        info.gaussians.append(g)
        return info

    @property
    def percentile(self) -> float:
        return self._percentile

    @property
    def mean(self) -> float:
        return self._mean

    @property
    def var(self) -> float:
        return self._var

    @property
    def low(self) -> float:
        # Lower truncation bound (quantile of the untruncated normal).
        return self._low

    @property
    def high(self) -> float:
        # Upper truncation bound (quantile of the untruncated normal).
        return self._high
class GMM:
    """A Gaussian mixture model over delays: a weighted mixture of (truncated)
    `Gaussian` components. All components must be either deterministic
    (var == 0) or stochastic (var > 0) — mixing the two is not supported."""

    def __init__(self, gaussians: List["Gaussian"], weights: List[float]):
        """
        :param gaussians: Mixture components (at least one).
        :param weights: Positive component weights; normalized to sum to 1.
        """
        assert len(gaussians) > 0, "Must specify at least 1 Gaussian."
        assert len(gaussians) == len(weights), "Must provide an equal number of weights and Gaussians"
        assert all([w > 0 for w in weights]), "All weights must be positive."
        self._weights = [w / sum(weights) for w in weights]
        self._gaussians = gaussians
        # Check if distributions are from the same family
        deterministic = [v == 0 for v in self.vars]
        assert all(deterministic) or not any(deterministic), "Either all distributions must be deterministic (ie var=0) or stochastic (var>0)"
        if all(deterministic):
            self._dist = tfd.MixtureSameFamily(mixture_distribution=tfd.Categorical(probs=self._weights),
                                               components_distribution=tfd.Deterministic(loc=self.means))
        else:
            # NOTE(review): as in Gaussian.__init__, variances (self.vars) are passed
            # as tfd's `scale` (a standard deviation). Likely a bug, but fixing it
            # would change sampled delays — confirm intent before touching it.
            self._dist = tfd.MixtureSameFamily(mixture_distribution=tfd.Categorical(probs=self._weights),
                                               components_distribution=tfd.TruncatedNormal(loc=self.means,
                                                                                           scale=self.vars,
                                                                                           low=[g.low for g in self._gaussians],
                                                                                           high=[g.high for g in self._gaussians]))

    def __repr__(self):
        msg = " | ".join([f"{w: .2f}*N({m: .4f}, {v: .4f})" for w, m, v in zip(self.weights, self.means, self.vars)])
        return f"GMM | {msg} | percentile={self.percentile}"

    def __add__(self, other: "Distribution"):
        """Distribution of the sum of two independent delays: the pairwise
        convolution of components, with multiplied weights."""
        # Convert to GMM
        if isinstance(other, Gaussian):
            other = GMM([other], weights=[1.0])
        # Only compatible with Gaussian or GMM
        if not isinstance(other, GMM):
            raise NotImplementedError("Not yet implemented")
        percentile = max(other.percentile, self.percentile)
        gaussians, weights = [], []
        for w, m, v in zip(self.weights, self.means, self.vars):
            for ow, om, ov in zip(other.weights, other.means, other.vars):
                weights.append(w*ow)
                gaussians.append(Gaussian(m + om, v + ov, percentile=percentile))
        return GMM(gaussians, weights)

    def pdf(self, x: jnp.ndarray):
        """Probability density at `x`."""
        return self._dist.prob(x)

    def cdf(self, x: jnp.ndarray):
        """Cumulative probability at `x`."""
        return self._dist.cdf(x)

    def sample(self, rng: jnp.ndarray, shape: Union[int, Tuple] = None):
        """Draw sample(s) of the given shape using PRNG key `rng`."""
        if shape is None:
            shape = ()
        return self._dist.sample(sample_shape=shape, seed=rng)

    @property
    def info(self) -> log_pb2.GMM:
        """Proto representation: one Gaussian entry per component, with its normalized weight."""
        info = log_pb2.GMM()
        for w, g in zip(self.weights, self._gaussians):
            ginfo = g.info.gaussians[0]
            ginfo.weight = w
            info.gaussians.append(ginfo)
        return info

    @classmethod
    def from_info(cls, info: log_pb2.GMM):
        """Reconstruct a GMM from its proto log."""
        weights = []
        gaussians = []
        for g in info.gaussians:
            weights.append(g.weight)
            gaussians.append(Gaussian.from_info(g))
        return cls(gaussians, weights)

    @property
    def percentile(self) -> float:
        # Most conservative truncation percentile across components.
        return max([g.percentile for g in self._gaussians])

    @property
    def weights(self) -> List[float]:
        return self._weights

    @property
    def means(self) -> List[float]:
        return [g.mean for g in self._gaussians]

    @property
    def vars(self) -> List[float]:
        return [g.var for g in self._gaussians]

    @property
    def low(self) -> float:
        # Smallest lower truncation bound across components.
        return min([g.low for g in self._gaussians])

    @property
    def high(self) -> float:
        # Largest upper truncation bound across components.
        return max([g.high for g in self._gaussians])
Distribution = Union[Gaussian, GMM] | /rex_lib-0.0.1-py3-none-any.whl/rex/distributions.py | 0.754373 | 0.569374 | distributions.py | pypi |
import rex.utils as utils
from typing import Dict, Tuple
# Column indices into each 10-shade color ramp (index 0 = lightest, 9 = darkest).
EDGE_INDEX = 9
FACE_INDEX = 6
CANVAS_INDEX = 2

# Color wheel: ten shades per hue, ordered light -> dark (Open Color-style palette).
CWHEEL = {
    "white": ["#ffffff"] * 10,
    "black": ["#000000"] * 10,
    "gray": ["#f8f9fa", "#f1f3f5", "#e9ecef", "#dee2e6", "#ced4da",
             "#adb5bd", "#868e96", "#495057", "#343a40", "#212529"],
    "red": ["#fff5f5", "#ffe3e3", "#ffc9c9", "#ffa8a8", "#ff8787",
            "#ff6b6b", "#fa5252", "#f03e3e", "#e03131", "#c92a2a"],
    "pink": ["#fff0f6", "#ffdeeb", "#fcc2d7", "#faa2c1", "#f783ac",
             "#f06595", "#e64980", "#d6336c", "#c2255c", "#a61e4d"],
    "grape": ["#f8f0fc", "#f3d9fa", "#eebefa", "#e599f7", "#da77f2",
              "#cc5de8", "#be4bdb", "#ae3ec9", "#9c36b5", "#862e9c"],
    "violet": ["#f3f0ff", "#e5dbff", "#d0bfff", "#b197fc", "#9775fa",
               "#845ef7", "#7950f2", "#7048e8", "#6741d9", "#5f3dc4"],
    "indigo": ["#edf2ff", "#dbe4ff", "#bac8ff", "#91a7ff", "#748ffc",
               "#5c7cfa", "#4c6ef5", "#4263eb", "#3b5bdb", "#364fc7"],
    "blue": ["#e7f5ff", "#d0ebff", "#a5d8ff", "#74c0fc", "#4dabf7",
             "#339af0", "#228be6", "#1c7ed6", "#1971c2", "#1864ab"],
    "cyan": ["#e3fafc", "#c5f6fa", "#99e9f2", "#66d9e8", "#3bc9db",
             "#22b8cf", "#15aabf", "#1098ad", "#0c8599", "#0b7285"],
    "teal": ["#e6fcf5", "#c3fae8", "#96f2d7", "#63e6be", "#38d9a9",
             "#20c997", "#12b886", "#0ca678", "#099268", "#087f5b"],
    "green": ["#ebfbee", "#d3f9d8", "#b2f2bb", "#8ce99a", "#69db7c",
              "#51cf66", "#40c057", "#37b24d", "#2f9e44", "#2b8a3e"],
    "lime": ["#f4fce3", "#e9fac8", "#d8f5a2", "#c0eb75", "#a9e34b",
             "#94d82d", "#82c91e", "#74b816", "#66a80f", "#5c940d"],
    "yellow": ["#fff9db", "#fff3bf", "#ffec99", "#ffe066", "#ffd43b",
               "#fcc419", "#fab005", "#f59f00", "#f08c00", "#e67700"],
    "orange": ["#fff4e6", "#ffe8cc", "#ffd8a8", "#ffc078", "#ffa94d",
               "#ff922b", "#fd7e14", "#f76707", "#e8590c", "#d9480f"],
}


def ecolor_fn(name: str, index=EDGE_INDEX):
    """Return the edge shade (default: darkest) of color `name`."""
    assert name in CWHEEL, f"Color not found: {name}. Available colors: {list(CWHEEL.keys())}."
    return CWHEEL[name][index]


def fcolor_fn(name: str, index=FACE_INDEX):
    """Return the face shade (default: mid-dark) of color `name`."""
    assert name in CWHEEL, f"Color not found: {name}. Available colors: {list(CWHEEL.keys())}."
    return CWHEEL[name][index]


def ccolor(name: str, index=CANVAS_INDEX):
    """Return the canvas shade (default: light) of color `name`."""
    assert name in CWHEEL, f"Color not found: {name}. Available colors: {list(CWHEEL.keys())}."
    return CWHEEL[name][index]
def cscheme_fn(cscheme: Dict[str, str], edge_index=EDGE_INDEX, face_index=FACE_INDEX) -> Tuple[utils.AttrDict, utils.AttrDict]:
    """Create a color scheme from a dictionary of colors.

    Maps every ``name -> base color`` entry to a pair of attribute dicts holding
    the edge (dark) and face (mid) hex shades of that base color.
    """
    edge_colors = utils.AttrDict()
    face_colors = utils.AttrDict()
    for name, color in cscheme.items():
        edge_colors[name] = ecolor_fn(color, index=edge_index)
        face_colors[name] = fcolor_fn(color, index=face_index)
    return edge_colors, face_colors
# Determine default color scheme
default_cscheme = {"computation": "blue",
                   "phase": "yellow",
                   "advanced": "green",
                   "communication": "cyan",
                   "sleep": "gray",
                   "delay": "red",
                   "scheduled": "black",
                   "phase_input": "yellow",
                   "excluded": "red",  # NOTE(review): the original labels for "excluded" ("Used steps") and "used" ("Removed steps") appear swapped — confirm.
                   "used": "gray",  # Removed steps
                   "rerouted": "orange",  # Rerouted dependency
                   "skip": "green",  # presumably: skipped dependency — confirm
                   "normal": "gray",  # Normal dependency
                   }
# Module-level default edge/face color lookups derived from the scheme above.
ecolor, fcolor = cscheme_fn(default_cscheme)
from functools import partial
import jax
from typing import Any, Union, List, TypeVar
from flax import struct
from flax.core import FrozenDict
import jumpy as jp
import rex.jumpy as rjp
# Type variables keeping node-specific pytrees (output/state/params) generic.
Output = TypeVar('Output')
State = TypeVar('State')
Params = TypeVar('Params')
@struct.dataclass
class InputState:
    """A ring buffer that holds the inputs for a node's input channel."""
    seq: jp.ndarray      # sequence numbers of the buffered messages
    ts_sent: jp.ndarray  # send timestamps of the buffered messages
    ts_recv: jp.ndarray  # receive timestamps of the buffered messages
    data: Output  # --> must be a pytree where the shape of every leaf will become (size, *leafs.shape)

    @classmethod
    def from_outputs(cls, seq: jp.ndarray, ts_sent: jp.ndarray, ts_recv: jp.ndarray, outputs: List[Any]) -> "InputState":
        """Create an InputState from a list of outputs.

        The oldest message should be the first in the list.
        """
        data = jp.tree_map(lambda *o: jp.stack(o, axis=0), *outputs)
        return cls(seq=seq, ts_sent=ts_sent, ts_recv=ts_recv, data=data)

    def _shift(self, a: jp.ndarray, new: jp.ndarray):
        # Roll the buffer one slot to the left and write the newest entry last.
        rolled_a = jp.roll(a, -1, axis=0)
        new_a = rjp.index_update(rolled_a, -1, new, copy=True)
        return new_a

    # @partial(jax.jit, static_argnums=(0,))
    def push(self, seq: int, ts_sent: float, ts_recv: float, data: Any) -> "InputState":
        """Return a new InputState with the message appended and the oldest entry dropped."""
        # todo: in-place update when we use numpy.
        size = self.seq.shape[0]
        tb = [self.seq, self.ts_sent, self.ts_recv, self.data]
        new_t = [seq, ts_sent, ts_recv, data]
        # get new values
        if size > 1:
            new = jp.tree_map(lambda tb, t: self._shift(tb, t), tb, new_t)
        else:
            # Single-slot buffer: just overwrite index 0.
            new = jp.tree_map(lambda _tb, _t: rjp.index_update(_tb, jp.int32(0), _t, copy=True), tb, new_t)
        return InputState(*new)

    def __getitem__(self, val):
        """Slice every buffered field (and every leaf of `data`) with `val`."""
        tb = [self.seq, self.ts_sent, self.ts_recv, self.data]
        return InputState(*jp.tree_map(lambda _tb: _tb[val], tb))
@struct.dataclass
class StepState:
    """Bundle of per-node quantities handed to a node's step function."""
    rng: jp.ndarray  # PRNG key for this node
    inputs: FrozenDict[str, InputState]  # buffered inputs keyed by input channel name
    state: State  # node-specific state pytree
    params: Params  # node-specific parameter pytree
@struct.dataclass
class GraphState:
    """Global state of the computation graph: one StepState per node plus a step counter."""
    nodes: FrozenDict[str, StepState]  # step states keyed by node name
    step: rjp.int32 = struct.field(pytree_node=True, default_factory=lambda: jp.int32(0))  # episode step counter
    outputs: FrozenDict[str, Output] = struct.field(pytree_node=True, default_factory=lambda: FrozenDict({}))  # optional outputs keyed by name (empty by default)
from typing import Tuple, Deque, Dict
from collections import deque
from concurrent.futures import Future, CancelledError
import jumpy as jp
from rex.base import StepState, InputState, GraphState, Output, Params, State
from rex.node import Node
class Agent(Node):
    """A node whose step() is driven from outside via futures.

    step() publishes the observation through a Future on the `observation`
    queue and then blocks until the caller fulfills the matching Future on the
    `action` queue with a (step_state, output) pair.
    """

    def __init__(self, *args, **kwargs):
        # Declarations only; the futures/queues are (re)built in _agent_reset().
        self._must_reset: bool
        self._f_act: Future
        self._f_obs: Future
        self._q_act: Deque[Future] = deque()
        self._q_obs: Deque[Future]
        super().__init__(*args, **kwargs)

    def default_params(self, rng: jp.ndarray, graph_state: GraphState = None) -> Params:
        """Default params of the node."""
        raise NotImplementedError

    def default_state(self, rng: jp.ndarray, graph_state: GraphState = None) -> State:
        """Default state of the node."""
        raise NotImplementedError

    def default_output(self, rng: jp.ndarray, graph_state: GraphState = None) -> Output:
        """Default output of the node."""
        raise NotImplementedError

    def default_inputs(self, rng: jp.ndarray, graph_state: GraphState = None) -> Dict[str, InputState]:
        """Default inputs of the node."""
        return super().default_inputs(rng, graph_state)

    def reset(self, rng: jp.ndarray, graph_state: GraphState = None) -> StepState:
        """Reset the agent."""
        raise NotImplementedError

    def get_step_state(self, graph_state: GraphState) -> StepState:
        """Get the step state of the agent."""
        return graph_state.nodes[self.name]

    @property
    def action(self) -> Deque[Future]:
        # Futures the external caller fulfills with (step_state, output).
        return self._q_act

    @property
    def observation(self) -> Deque[Future]:
        # Futures resolved by step() with (ts, step_state).
        return self._q_obs

    def _agent_reset(self):
        """Rebuild the future queues and pre-register the first observation future."""
        self._must_reset = False
        self._q_act: Deque[Future] = deque()
        self._q_obs: Deque[Future] = deque()
        self._f_obs = Future()
        self._q_obs.append(self._f_obs)

    def step(self, ts: jp.float32, step_state: StepState) -> Tuple[StepState, Output]:
        """Publish the observation, then block until the caller provides an action.

        Returns (None, None) when the pending action future is cancelled (a
        reset was initiated); returns None implicitly if a reset is already
        pending.
        """
        self._f_act = Future()
        self._q_act.append(self._f_act)
        # Prepare new obs future
        _new_f_obs = Future()
        self._q_obs.append(_new_f_obs)
        # Set observations as future result
        self._f_obs.set_result((ts, step_state))
        self._f_obs = _new_f_obs
        # Wait for action future's result to be set with action
        if not self._must_reset:
            try:
                step_state, output = self._f_act.result()
                self._q_act.popleft()
                return step_state, output
            except CancelledError:  # If cancelled is None, we are going to reset
                self._q_act.popleft()
                self._must_reset = True
                # NOTE(review): indentation reconstructed — confirm this return
                # belongs to the except-branch (and not to the method level).
                return None, None  # Do not return anything if we must reset
import time
import abc
from typing import Any, Dict, List, Tuple, Union
import jumpy as jp
import jax.numpy as jnp
import numpy as onp
from flax.core import FrozenDict
from rex.agent import Agent
from rex.constants import SYNC, SIMULATED, PHASE, FAST_AS_POSSIBLE
from rex.base import StepState, GraphState
from rex.proto import log_pb2
from rex.node import Node
float32 = Union[jnp.float32, onp.float32]
class BaseGraph:
    """Abstract interface for stepping a node graph with an agent in the loop."""

    def __init__(self, agent: Agent):
        self.agent = agent

    @abc.abstractmethod
    def reset(self, graph_state: GraphState) -> Tuple[GraphState, float32, StepState]:
        """Reset the graph; return the next graph state, timestamp and agent step state."""
        raise NotImplementedError

    @abc.abstractmethod
    def step(self, graph_state: GraphState, step_state: StepState, action: Any) -> Tuple[GraphState, float32, StepState]:
        """Advance the graph by one agent step."""
        raise NotImplementedError

    def stop(self, timeout: float = None):
        """Stop the graph; a no-op for the base class."""

    def start(self):
        """Start the graph; a no-op for the base class."""
class Graph(BaseGraph):
    """Asynchronous graph: nodes run concurrently; the agent is driven via futures."""

    def __init__(
        self,
        nodes: Dict[str, "Node"],
        agent: Agent,
        sync: int = SYNC,
        clock: int = SIMULATED,
        scheduling: int = PHASE,
        real_time_factor: Union[int, float] = FAST_AS_POSSIBLE,
    ):
        self.nodes = nodes
        # The agent is kept alongside the other nodes for joint bookkeeping.
        self._nodes_and_agent = {**nodes, agent.name: agent}
        self.sync = sync
        self.clock = clock
        self.scheduling = scheduling
        self.real_time_factor = real_time_factor
        super().__init__(agent=agent)

    def reset(self, graph_state: GraphState) -> Tuple[GraphState, float32, Any]:
        """Restart all nodes and block until the agent publishes its first observation."""
        # Stop first, if we were previously running.
        self.stop()
        # An additional reset is required when running async (futures, etc..)
        self.agent._agent_reset()
        # Reset async backend of every node
        for node in self._nodes_and_agent.values():
            node._reset(
                graph_state,
                sync=self.sync,
                clock=self.clock,
                scheduling=self.scheduling,
                real_time_factor=self.real_time_factor,
            )
        # Check that all nodes have the same episode counter
        assert len({n.eps for n in self._nodes_and_agent.values()}) == 1, "All nodes must have the same episode counter."
        # Start nodes (provide same starting timestamp to every node)
        start = time.time()
        [n._start(start=start) for n in self._nodes_and_agent.values()]
        # Retrieve first obs (blocks until the agent's step() resolves the future)
        next_ts_step, next_step_state = self.agent.observation.popleft().result()
        # Create the next graph state
        nodes = {name: node._step_state for name, node in self._nodes_and_agent.items()}
        nodes[self.agent.name] = next_step_state
        next_graph_state = GraphState(step=jp.int32(0), nodes=FrozenDict(nodes))
        return next_graph_state, next_ts_step, next_step_state

    def step(self, graph_state: GraphState, step_state: StepState, output: Any) -> Tuple[GraphState, float32, StepState]:
        """Hand the agent its action, then block until the next observation arrives."""
        # Set the result to be the step_state and output (action) of the agent.
        self.agent.action[-1].set_result((step_state, output))
        # Retrieve the first obs
        next_ts_step, next_step_state = self.agent.observation.popleft().result()
        # Create the next graph state
        nodes = {name: node._step_state for name, node in self._nodes_and_agent.items()}
        nodes[self.agent.name] = next_step_state
        next_graph_state = GraphState(step=graph_state.step + 1, nodes=FrozenDict(nodes))
        return next_graph_state, next_ts_step, next_step_state

    def stop(self, timeout: float = None):
        """Cancel any pending agent action and stop every node."""
        # Initiate stop (this unblocks the agent's step, that is waiting for an action).
        if len(self.agent.action) > 0:
            self.agent.action[-1].cancel()
        # Stop all nodes
        fs = [n._stop(timeout=timeout) for n in self._nodes_and_agent.values()]
        # Wait for all nodes to stop
        [f.result() for f in fs]
from typing import Any, Tuple, Dict, Union, Optional
import gym
import jumpy as jp
import abc
from rex.spaces import Space
from rex.utils import log
from rex.node import Node
from rex.graph import Graph
from rex.compiled import CompiledGraph
from rex.base import GraphState, Params
from rex.proto import log_pb2
from rex.constants import SYNC, SIMULATED, PHASE, FAST_AS_POSSIBLE, INTERPRETED, VECTORIZED, SEQUENTIAL, BATCHED, WARN
from rex.agent import Agent
class BaseEnv:
    """Gym-style base environment wrapping a rex graph (interpreted or compiled)."""

    def __init__(self,
                 nodes: Dict[str, "Node"],
                 agent: Agent,
                 max_steps: int = 200,
                 sync: int = SYNC,
                 clock: int = SIMULATED,
                 scheduling: int = PHASE,
                 real_time_factor: Union[int, float] = FAST_AS_POSSIBLE,
                 graph: int = INTERPRETED,
                 trace: log_pb2.TraceRecord = None,
                 log_level: int = WARN,
                 name: str = "env",
                 color: str = "blue",
                 ):
        self.log_level = log_level
        self.name = name
        self.color = color
        # NOTE(review): the parameter default is 200 but an explicit None falls
        # back to 100 — confirm which fallback is intended.
        self.max_steps = 100 if max_steps is None else max_steps
        assert self.max_steps > 0, "max_steps must be a positive integer"
        # Check that the agent is of the correct type
        assert isinstance(agent, Agent), "The agent must be an instance of Agent"
        assert len([n for n in nodes.values() if n.name == agent.name]) == 0, "The agent should be provided separately, so not inside the `nodes` dict"
        # Initialize graph
        if graph in [VECTORIZED, SEQUENTIAL, BATCHED]:
            # Compiled execution replays a recorded trace.
            assert trace is not None, "Compiled graphs require a trace"
            self.graph = CompiledGraph(nodes, trace, agent, graph)
            assert self.graph.max_steps >= self.max_steps, f"max_steps ({self.max_steps}) must be smaller than the max number of compiled steps in the graph ({self.graph.max_steps})"
        elif graph == INTERPRETED:
            if trace is not None:
                self.log("WARNING", "trace is ignored. Set `graph` to a compiled setting (.e.g SEQUENTIAL) to use it.", log_level=WARN)
            self.graph = Graph(nodes, agent, sync, clock, scheduling, real_time_factor)
        else:
            raise ValueError(f"Unknown graph mode: {graph}")

    @abc.abstractmethod
    def reset(self, rng: jp.ndarray, graph_state: GraphState = None) -> Tuple[GraphState, Any]:
        """Reset the environment; returns the new graph state and the first observation."""
        raise NotImplementedError

    @abc.abstractmethod
    def step(self, graph_state: GraphState, action: Any) -> Tuple[GraphState, Any, float, bool, Dict]:
        """Apply `action`; returns (graph_state, obs, reward, done, info)."""
        raise NotImplementedError

    def close(self):
        # Alias for stop().
        self.stop()

    def stop(self):
        """Stop the underlying graph."""
        return self.graph.stop()

    def render(self):
        raise NotImplementedError

    def action_space(self, params: Params = None) -> Space:
        """Action space of the environment."""
        raise NotImplementedError

    def observation_space(self, params: Params = None) -> Space:
        """Observation space of the environment."""
        raise NotImplementedError

    @property
    def unwrapped(self):
        # Gym compatibility: no wrappers at this level.
        return self

    def env_is_wrapped(self, wrapper_class, indices=None):
        # Gym/VecEnv compatibility hook; this base env is never wrapped.
        return False

    def log(self, id: str, value: Optional[Any] = None, log_level: Optional[int] = None):
        """Log `value` under tag `id`, capped at the env's own log level."""
        log_level = log_level if isinstance(log_level, int) else self.log_level
        log(self.name, self.color, min(log_level, self.log_level), id, value)
from multiprocessing import Pool
from .rmd_file_list import BiLingRmdFileList, BiLingualRmdFilePair
from .item import RExamItem
from ..misc import iter_list
class _SearchSchemata:
    """Accumulates search predicates over RExam items.

    Each registered function returns the position of `parameter` in the chosen
    item field (or -1 when absent), mirroring str.find semantics.
    """

    def __init__(self):
        self.search_types = []
        self.parameter = []
        self.functions = []

    def add(self, search_type, parameter):
        """para: search_type has to be 'name', 'question', 'solution',
        'meta_info' or 'raw_rmd'

        if parameter is iterable (but not a string), it will be multiple
        functions with different parameter.
        """
        if isinstance(parameter, str) or not hasattr(parameter, "__iter__"):
            self._add(search_type, parameter)
        else:
            for single_parameter in parameter:
                self._add(search_type, single_parameter)

    def _add(self, search_type, parameter):
        # Build one find-function bound to this (type, parameter) pair.
        if search_type == "name":
            fnc = lambda item: item.name.find(parameter)
        elif search_type == "question":
            fnc = lambda item: item.question.str_text().find(parameter)
        elif search_type == "solution":
            fnc = lambda item: item.solution.str_text().find(parameter)
        elif search_type == "meta_info":
            fnc = lambda item: item.meta_info.str_text().find(parameter)
        elif search_type == "raw_rmd":
            fnc = lambda item: str(item).find(parameter)
        else:
            raise RuntimeError("{} is an unknown SearchFunction "
                               "type.".format(search_type))
        self.functions.append(fnc)
        self.parameter.append(parameter)
        self.search_types.append(search_type)
class EntryItemDatabase(object):
    """Bilingual RExam item entry: the two language versions of one item.

    Either language slot may be None when that translation does not exist; the
    hash helpers then return the empty string.
    """

    def __init__(self, shared_name, item_l1, item_l2):
        assert isinstance(item_l1, RExamItem) or item_l1 is None
        assert isinstance(item_l2, RExamItem) or item_l2 is None
        self.shared_name = shared_name
        self.item_l1 = item_l1
        self.item_l2 = item_l2
        self.id = None  # unique id, assigned later by ItemDatabase

    def is_same_as(self, item):
        """compares shared names and version id
        and ignores the id"""
        if isinstance(item, EntryItemDatabase):
            return self.shared_name == item.shared_name and \
                   self.hash_l1() == item.hash_l1() and \
                   self.hash_l2() == item.hash_l2()
        else:
            return False

    def hash_l1(self):
        """Version hash of the L1 item; '' if the item is missing."""
        # Was a bare `except:`; narrowed to the missing-item (None) case.
        try:
            return self.item_l1.hash()
        except AttributeError:
            return ""

    def hash_l2(self):
        """Version hash of the L2 item; '' if the item is missing."""
        try:
            return self.item_l2.hash()
        except AttributeError:
            return ""

    def hash_l1_short(self):
        """Short version hash of the L1 item; '' if the item is missing."""
        try:
            return self.item_l1.hash_short()
        except AttributeError:
            return ""

    def hash_l2_short(self):
        """Short version hash of the L2 item; '' if the item is missing."""
        try:
            return self.item_l2.hash_short()
        except AttributeError:
            return ""

    def short_repr(self, max_lines, use_l2, add_versions=False, short_version=True):
        """Return [shared_name, question_excerpt(, version_hash)] for list displays.

        `use_l2` selects the language; missing items yield an empty excerpt.
        """
        if use_l2:
            try:
                txt = self.item_l2.question.str_text_short(max_lines, ignore_empty_lines=True)
            except AttributeError:
                txt = ""
        else:
            try:
                txt = self.item_l1.question.str_text_short(max_lines, ignore_empty_lines=True)
            except AttributeError:
                txt = ""

        rtn = [self.shared_name, txt]
        if add_versions:
            if short_version:
                if use_l2:
                    rtn.append(self.hash_l2_short())
                else:
                    rtn.append(self.hash_l1_short())
            else:
                if use_l2:
                    rtn.append(self.hash_l2())
                else:
                    rtn.append(self.hash_l1())
        return rtn

    @staticmethod
    def load(biling_filelist_entry, shared_name_with_bilingual_tag=False):
        """Load both language versions referenced by a BiLingualRmdFilePair."""
        assert isinstance(biling_filelist_entry, BiLingualRmdFilePair)
        if biling_filelist_entry.rmdfile_l1 is not None:
            l1 = RExamItem(biling_filelist_entry.rmdfile_l1)
        else:
            l1 = None
        if biling_filelist_entry.rmdfile_l2 is not None:
            l2 = RExamItem(biling_filelist_entry.rmdfile_l2)
        else:
            l2 = None

        return EntryItemDatabase(
            shared_name=biling_filelist_entry.shared_name(
                add_bilingual_tag=shared_name_with_bilingual_tag),
            item_l1=l1,
            item_l2=l2)
class ItemDatabase(BiLingRmdFileList):
    """Searchable collection of bilingual RExam items loaded from Rmd files."""

    def __init__(self, base_directory, files_first_level,
                 files_second_level,
                 check_for_bilingual_files):
        """file_list_bilingual: path or file_list_biligual
        """
        super().__init__(base_directory=base_directory,
                         files_first_level=files_first_level,
                         files_second_level=files_second_level,
                         check_for_bilingual_files=check_for_bilingual_files)
        self._selected_ids = []
        ## LOAD DATA
        if len(self.files) > 1000:
            # speed up with multiprocessing
            # NOTE(review): the Pool is never closed/joined; consider a context manager.
            entries = Pool().map(EntryItemDatabase.load, self.files)
        else:
            entries = map(EntryItemDatabase.load, self.files)
        self.entries = list(entries)
        # add unique ids
        for x in range(len(self.entries)):
            self.entries[x].id = x
        self.select()  # select all

    def get_entries(self, ids, rm_nones=True):
        """returns subset of DatabaseEntries items """
        rtn = []
        for i in ids:
            # NOTE(review): bare except silently drops out-of-range/non-int ids;
            # consider narrowing to (IndexError, TypeError).
            try:
                rtn.append(self.entries[i])
            except:
                if not rm_nones:
                    rtn.append(None)
        return rtn

    @property
    def selected_entries(self):
        """selected name, l1, l2, l1_hash,
        l2_hash"""
        return self.get_entries(self._selected_ids, rm_nones=True)

    def _search_select(self, search_function, item_ids_subset):
        """searches rexam file using search_function and returns idx,
        if found for at least one of the language.
        Use item_ids_subset (array if ids) to define the the subset of
        items, in which you want to serach """
        idx = []
        for x in item_ids_subset:
            # search_function returns str.find semantics: position or -1.
            # Bare excepts cover missing items (None); try L1 first, then L2.
            try:
                found = search_function(self.entries[x].item_l1)
            except:
                found = -1
            if found<0:
                try:
                    found = search_function(self.entries[x].item_l2)
                except:
                    found = -1
            if found>=0:
                idx.append(x)
        return idx

    def select(self, name=None, question=None, solution=None,
               meta_information=None, raw_rmd=None,
               search_logic_or=False):
        """select items based on text search"""
        # select all
        self._selected_ids = range(len(self.entries))
        # Build one predicate per (field, parameter) pair.
        search = _SearchSchemata()
        search.add("name", iter_list(name))
        search.add("question", iter_list(question))
        search.add("solution", iter_list(solution))
        search.add("meta_info", iter_list(meta_information))
        search.add("raw_rmd", iter_list(raw_rmd))

        if search_logic_or:
            # OR: union of ids matched by any predicate.
            idx = []
            for fnc in search.functions:
                for x in self._search_select(fnc, item_ids_subset=self._selected_ids):
                    if x not in idx:
                        idx.append(x)
            self._selected_ids = sorted(idx)
        else:
            # AND: each predicate further narrows the current selection.
            for fnc in search.functions:
                self._selected_ids = self._search_select(fnc, item_ids_subset=self._selected_ids)
        return self._selected_ids

    def find_entry(self, entry_item_database):
        """returns all id of identical entries """
        same = filter(lambda x:x.is_same_as(entry_item_database), self.entries)
        rtn = map(lambda x: x.id, same)
        return list(rtn)

    def find(self, hash_l1,
             hash_l2=None,
             relative_path_l1 = None,
             relative_path_l2=None,
             shared_name = None,
             find_all=False):
        """advanced search function

        returns on first id or array with all is (find_all=True)
        the found entries
        """
        rtn = []
        for cnt, e in enumerate(self.entries):
            # Optional filters: None means "do not constrain on this field".
            a = shared_name is None or e.shared_name == shared_name
            b = relative_path_l1 is None or (e.item_l1 is not None and
                                             e.item_l1.relative_path == relative_path_l1)
            c = relative_path_l2 is None or \
                (e.item_l2 is not None and \
                 e.item_l2.relative_path == relative_path_l2)
            if a and b and c:
                if e.hash_l1() == hash_l1 and \
                        (hash_l2 is None or
                         e.hash_l2() == hash_l2):
                    if find_all:
                        rtn.append(cnt)
                    else:
                        return cnt
        if not find_all:
            return None
        else:
            return rtn
from os import path
from copy import deepcopy
import shutil
from .filepath import FilePath, os_rename
from ..consts import CODE_L1, CODE_L2
# Separator between an item name and its two-letter language code suffix.
SEP = "-"
TAG_L1 = SEP + CODE_L1  # suffix tag for language 1
TAG_L2 = SEP + CODE_L2  # suffix tag for language 2
TAG_BILINGUAL = "{}[{}/{}]".format(SEP, CODE_L1, CODE_L2)  # tag marking a bilingual pair
def _copytree(source_folder, destination_folder):
    """Recursively copy a folder; return the IOError on failure, else None."""
    try:
        shutil.copytree(source_folder, destination_folder)
    except IOError as copy_error:
        return copy_error
    return None
class RmdFile(FilePath):
    """Path helper for RExam .Rmd item files, including language-code handling.

    File names may carry a language suffix '<SEP><code>' where <code> is one
    of CODE_L1 / CODE_L2.
    """
    SUFFIX = ".Rmd"

    @staticmethod
    def make_path(base_directory, name, add_subdir=True):
        """Build '<base>/<name>/<name>.Rmd' (or '<base>/<name>.Rmd' without subfolder)."""
        if add_subdir:
            return path.join(base_directory, name, "{}{}".format(name,
                                                                 RmdFile.SUFFIX))
        else:
            return path.join(base_directory, "{}{}".format(name,
                                                           RmdFile.SUFFIX))

    @property
    def language_code(self):
        """Two-letter language code suffix of the name, or '' if absent/unknown."""
        if len(self.name) >= 4 and self.name[-3] == SEP:
            lang = self.name[-2:].lower()
            if lang in (CODE_L1, CODE_L2):
                return lang
        return ""

    @language_code.setter
    def language_code(self, v):
        assert isinstance(v, str) and len(v) == 2
        # Guard against names shorter than 3 chars (previously IndexError-prone).
        if len(self.name) >= 3 and self.name[-3] == SEP:
            # Replace the existing two-letter code. Bug fix: the previous code
            # used `self.name[-2] + v` (a single character), which discarded
            # the whole name instead of slicing with `self.name[:-2]`.
            self.name = self.name[:-2] + v
        else:
            self.name = self.name + SEP + v

    def subdir_mirrors_filename(self):
        """True if the file lives in a subfolder named after itself."""
        return self.name == self.sub_directory

    def get_other_language_rmdfile(self):
        """Return the counterpart RmdFile in the other language, or None if untagged."""
        if len(self.language_code):
            name = self.name[:-2]
            if self.language_code == CODE_L1:
                name += CODE_L2
            else:
                name += CODE_L1
            rtn = deepcopy(self)
            rtn.name = name
            if len(self.sub_directory):
                rtn.sub_directory = name
            return rtn
        else:
            return None

    def copy_subdir_files(self, new_name):
        """Copy this item's folder under `new_name` and rename the Rmd inside.

        Returns io error, if it occurs, otherwise the new RmdFile object.
        """
        new = deepcopy(self)
        new.name = new_name
        new.sub_directory = new_name
        # copy files
        ioerror = _copytree(self.directory, new.directory)
        if ioerror:
            return ioerror
        else:
            copied_rmd = deepcopy(new)
            copied_rmd.name = self.name  # has still old name
            ioerror = os_rename(copied_rmd.full_path, new.full_path)
            if ioerror:
                return ioerror
        return new

[](https://opensource.org/licenses/Apache-2.0)
[](https://github.com/AstraZeneca/rexmex/archive/master.zip)
[](https://github.com/AstraZeneca/rexmex/actions?query=workflow%3ACI)
[](https://codecov.io/gh/AstraZeneca/rexmex)
<p align="center">
<img width="90%" src="https://github.com/AstraZeneca/rexmex/blob/main/rexmex_small.jpg?raw=true?sanitize=true" />
</p>
--------------------------------------------------------------------------------
**reXmeX** is a recommender system evaluation metric library.
Please look at the **[Documentation](https://rexmex.readthedocs.io/en/latest/)** and **[External Resources](https://rexmex.readthedocs.io/en/latest/notes/resources.html)**.
**reXmeX** consists of utilities for recommender system evaluation. First, it provides a comprehensive collection of metrics for the evaluation of recommender systems. Second, it includes a variety of methods for reporting and plotting the performance results. Implemented metrics cover a range of well-known metrics and newly proposed metrics from data mining ([ICDM](http://icdm2019.bigke.org/), [CIKM](http://www.cikm2019.net/), [KDD](https://www.kdd.org/kdd2020/)) conferences and prominent journals.
**Citing**
If you find *RexMex* useful in your research, please consider adding the following citation:
```bibtex
@inproceedings{rexmex,
title = {{rexmex: A General Purpose Recommender Metrics Library for Fair Evaluation.}},
author = {Benedek Rozemberczki and Sebastian Nilsson and Piotr Grabowski and Charles Tapley Hoyt and Gavin Edwards},
year = {2021},
}
```
--------------------------------------------------------------------------------
**An introductory example**
The following example loads a synthetic dataset which has the mandatory `y_true` and `y_score` keys. The dataset has binary labels and predicted probability scores. We read the dataset and define a default `ClassificationMetricSet` instance for the evaluation of the predictions. Using this metric set we create a score card and get the predictive performance metrics.
```python
from rexmex import ClassificationMetricSet, DatasetReader, ScoreCard
reader = DatasetReader()
scores = reader.read_dataset()
metric_set = ClassificationMetricSet()
score_card = ScoreCard(metric_set)
report = score_card.get_performance_metrics(scores["y_true"], scores["y_score"])
```
--------------------------------------------------------------------------------
**An advanced example**
The following more advanced example loads the same synthetic dataset which has the `source_id`, `target_id`, `source_group` and `target group` keys besides the mandatory `y_true` and `y_score`. Using the `source_group` key we group the predictions and return a performance metric report.
```python
from rexmex import ClassificationMetricSet, DatasetReader, ScoreCard
reader = DatasetReader()
scores = reader.read_dataset()
metric_set = ClassificationMetricSet()
score_card = ScoreCard(metric_set)
report = score_card.generate_report(scores, grouping=["source_group"])
```
--------------------------------------------------------------------------------
**Scorecard**
A **rexmex** score card allows the reporting of recommender system performance metrics, plotting the performance metrics and saving those. Our framework provides 7 rating, 38 classification, 18 ranking, and 2 coverage metrics.
**Metric Sets**
Metric sets allow the users to calculate a range of evaluation metrics for a label - predicted label vector pair. We provide a general `MetricSet` class and specialized metric sets with pre-set metrics have the following general categories:
- **Ranking**
- **Rating**
- **Classification**
- **Coverage**
--------------------------------------------------------------------------------
**Ranking Metric Set**
* **[Normalized Distance Based Performance Measure (NDPM)](https://asistdl.onlinelibrary.wiley.com/doi/abs/10.1002/%28SICI%291097-4571%28199503%2946%3A2%3C133%3A%3AAID-ASI6%3E3.0.CO%3B2-Z)**
* **[Discounted Cumulative Gain (DCG)](https://en.wikipedia.org/wiki/Discounted_cumulative_gain)**
* **[Normalized Discounted Cumulative Gain (NDCG)](https://en.wikipedia.org/wiki/Discounted_cumulative_gain)**
* **[Reciprocal Rank](https://en.wikipedia.org/wiki/Mean_reciprocal_rank)**
<details>
<summary><b>Expand to see all ranking metrics in the metric set.</b></summary>
* **[Mean Reciprocal Rank (MRR)](https://en.wikipedia.org/wiki/Mean_reciprocal_rank)**
* **[Spearmanns Rho](https://en.wikipedia.org/wiki/Spearman%27s_rank_correlation_coefficient)**
* **[Kendall Tau](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient)**
* **[HITS@k](https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval))**
* **[Novelty](https://www.sciencedirect.com/science/article/pii/S163107051930043X)**
* **[Average Recall @ k](https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval))**
* **[Mean Average Recall @ k](https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval))**
* **[Average Precision @ k](https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval))**
* **[Mean Average Precision @ k](https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval))**
* **[Personalisation](http://www.mavir.net/docs/tfm-vargas-sandoval.pdf)**
* **[Intra List Similarity](http://www.mavir.net/docs/tfm-vargas-sandoval.pdf)**
</details>
--------------------------------------------------------------------------------
**Rating Metric Set**
These metrics assume that items are scored explicitly and ratings are predicted by a regression model.
* **[Mean Squared Error (MSE)](https://en.wikipedia.org/wiki/Mean_squared_error)**
* **[Root Mean Squared Error (RMSE)](https://en.wikipedia.org/wiki/Mean_squared_error)**
* **[Mean Absolute Error (MAE)](https://en.wikipedia.org/wiki/Mean_absolute_error)**
* **[Mean Absolute Percentage Error (MAPE)](https://en.wikipedia.org/wiki/Mean_absolute_percentage_error)**
<details>
<summary><b>Expand to see all rating metrics in the metric set.</b></summary>
* **[Symmetric Mean Absolute Percentage Error (SMAPE)](https://en.wikipedia.org/wiki/Symmetric_mean_absolute_percentage_error)**
* **[Pearson Correlation](https://en.wikipedia.org/wiki/Pearson_correlation_coefficient)**
* **[Coefficient of Determination](https://en.wikipedia.org/wiki/Coefficient_of_determination)**
</details>
--------------------------------------------------------------------------------
**Classification Metric Set**
These metrics assume that the items are scored with raw probabilities (these can be binarized).
* **[Precision (or Positive Predictive Value)](https://en.wikipedia.org/wiki/Precision_and_recall)**
* **[Recall (Sensitivity, Hit Rate, or True Positive Rate)](https://en.wikipedia.org/wiki/Precision_and_recall)**
* **[Area Under the Precision Recall Curve (AUPRC)](https://besjournals.onlinelibrary.wiley.com/doi/10.1111/2041-210X.13140)**
* **[Area Under the Receiver Operating Characteristic (AUROC)](https://en.wikipedia.org/wiki/Receiver_operating_characteristic)**
<details>
<summary><b>Expand to see all classification metrics in the metric set.</b></summary>
* **[F-1 Score](https://en.wikipedia.org/wiki/F-score)**
* **[Average Precision](https://scikit-learn.org/stable/modules/generated/sklearn.metrics.average_precision_score.html)**
* **[Specificty (Selectivity or True Negative Rate )](https://en.wikipedia.org/wiki/Precision_and_recall)**
* **[Matthew's Correlation](https://en.wikipedia.org/wiki/Precision_and_recall)**
* **[Accuracy](https://en.wikipedia.org/wiki/Precision_and_recall)**
* **[Balanced Accuracy](https://en.wikipedia.org/wiki/Precision_and_recall)**
* **[Fowlkes-Mallows Index](https://en.wikipedia.org/wiki/Precision_and_recall)**
</details>
--------------------------------------------------------------------------------
**Coverage Metric Set**
These metrics measure how well the recommender system covers the available items in the catalog and the possible users.
In other words, they measure the diversity of the predictions.
* **[Item Coverage](https://www.bgu.ac.il/~shanigu/Publications/EvaluationMetrics.17.pdf)**
* **[User Coverage](https://www.bgu.ac.il/~shanigu/Publications/EvaluationMetrics.17.pdf)**
--------------------------------------------------------------------------------
**Documentation and Reporting Issues**
Head over to our [documentation](https://rexmex.readthedocs.io) to find out more about installation and data handling, a full list of implemented methods, and datasets.
If you notice anything unexpected, please open an [issue](https://github.com/AstraZeneca/rexmex/issues) and let us know. If you are missing a specific method, feel free to open a [feature request](https://github.com/AstraZeneca/rexmex/issues).
We are motivated to constantly make RexMex even better.
--------------------------------------------------------------------------------
**Installation via the command line**
RexMex can be installed with the following command after the repo is cloned.
```sh
$ pip install .
```
Use `-e/--editable` when developing.
**Installation via pip**
RexMex can be installed with the following pip command.
```sh
$ pip install rexmex
```
As we create new releases frequently, upgrading the package casually might be beneficial.
```sh
$ pip install rexmex --upgrade
```
--------------------------------------------------------------------------------
**Running tests**
Tests can be run with `tox` with the following:
```sh
$ pip install tox
$ tox -e py
```
--------------------------------------------------------------------------------
**Citation**
If you use RexMex in a scientific publication, we would appreciate citations. Please see GitHub's built-in citation tool.
--------------------------------------------------------------------------------
**License**
- [Apache-2.0 License](https://github.com/AZ-AI/rexmex/blob/master/LICENSE)
| /rexmex-0.1.3.tar.gz/rexmex-0.1.3/README.md | 0.624752 | 0.958809 | README.md | pypi |
from typing import List, Tuple, Dict, Literal, Optional, Union
from os.path import abspath as os_abspath, basename as os_basename
from urllib.parse import urlsplit as urllib_urlsplit, quote as urllib_quote, unquote as urllib_unquote
from requests.api import request as requests_request
from requests.models import Response
from filetype import guess as filetype_guess
from .rfile import read_file
from .rregular import search
def url_join(url: str, params: Dict) -> str:
    """
    Join `URL` and `parameters`.

    Parameters
    ----------
    url : URL.
    params : Parameters of URL.

    Returns
    -------
    Joined URL.
    """
    # Encode each pair as "key=value" with the value URL-quoted.
    query = "&".join(
        "%s=%s" % (key, urllib_quote(value))
        for key, value in params.items()
    )
    # Pick a separator: start a query string, continue one, or append
    # directly when the URL already ends with "?".
    if "?" not in url:
        separator = "?"
    elif url.endswith("?"):
        separator = ""
    else:
        separator = "&"
    return url + separator + query
def url_split(url: str) -> Tuple[str, Dict[str, str]]:
    """
    Split `URL` and `parameters`.

    Parameters
    ----------
    url : URL.

    Returns
    -------
    Split URL and parameters.
    """
    # Split URL.
    split_result = urllib_urlsplit(url)
    params_str = split_result.query
    url = split_result.scheme + "://" + split_result.netloc + split_result.path
    # Split parameters.
    # Fix: a URL without a query string previously crashed with ValueError,
    # because "".split("&") yields [""] which cannot unpack into key/value.
    if params_str == "":
        params = {}
    else:
        params = {
            key: urllib_unquote(val)
            for key, val in (
                item.split("=", 1)
                for item in params_str.split("&")
            )
        }
    return url, params
def cookie_join(params: Dict[str, str]) -> str:
    """
    Join parameters of `Cookie`.

    Parameters
    ----------
    params : Parameters.

    Returns
    -------
    Joined cookie.
    """
    # Build one "key=value" fragment per parameter, then glue with "; ".
    fragments = []
    for pair in params.items():
        fragments.append("%s=%s" % pair)
    return "; ".join(fragments)
def cookie_split(cookie: str) -> Dict[str, str]:
    """
    Split parameters of `Cookie`.

    Parameters
    ----------
    cookie : Cookie.

    Returns
    -------
    Split parameters.
    """
    # Each "key=value" fragment is separated by "; "; split the value
    # on the first "=" only, so values may themselves contain "=".
    params = {}
    for fragment in cookie.split("; "):
        key, value = fragment.split("=", 1)
        params[key] = value
    return params
def content_type(file: Union[str, bytes]) -> str:
    """
    Guess HTTP `content type` of file.

    Parameters
    ----------
    file : File path or bytes data.

    Returns
    -------
    HTTP content type, or None when the type cannot be guessed.
    """
    # Delegate the detection to the filetype package.
    kind = filetype_guess(file)
    if kind is None:
        return None
    return kind.MIME
def request(
    url: str,
    params: Optional[Dict] = None,
    data: Optional[Union[Dict, str, bytes]] = None,
    json: Optional[Dict] = None,
    files: Optional[Dict[str, Union[str, bytes, Tuple[Union[str, bytes], dict]]]] = None,
    headers: Optional[Dict] = None,
    timeout: Optional[float] = None,
    proxies: Optional[Dict[str, str]] = None,
    method: Optional[Literal["get", "post", "put", "patch", "delete"]] = None,
    throw_e: bool = False
) -> Response:
    """
    `Send` request.

    Parameters
    ----------
    url : Request URL.
    params : Request URL add parameters.
    data : Request body data.
        - `Dict` : Convert to `key=value&...` format bytes.
        - `str` : File path to read file bytes data.
            Auto set `Content-Disposition` filename to the file name.
        - `bytes` : File bytes data.
            Auto set `Content-Type` to the guessed file media type.
    json : Request body data, convert to `JSON` format.
    files : Request body data, convert to `multi form` format.
        - `Dict[str, str]` : Parameter name and file path to read bytes data.
        - `Dict[str, bytes]` : Parameter name and file bytes data.
        - `Dict[str, Tuple[Union[str, bytes], dict]]` : As above, plus extra
            per-file parameters (e.g. `filename`, `Content-Type`).
    headers : Request header data.
        - `None` : Empty headers.
    timeout : Request maximum waiting time in seconds.
        - `None` : No limit.
    proxies : Proxy IP setup.
        - `None` : No setup.
        - `Dict[str, str]` : Name and use IP of each protocol.
    method : Request method.
        - `None` : Automatic judge, `get` without body data, `post` with it.
        - `Literal['get', 'post', 'put', 'patch', 'delete']` : Use this method.
    throw_e : Whether throw `exception` when response code is not `200`.

    Returns
    -------
    Response object of requests package.

    Raises
    ------
    AssertionError : When `throw_e` is True and the status code is not 200.
    """
    # Fix: 'headers' and 'proxies' previously used mutable default arguments;
    # 'headers' is mutated below, so the shared default dict leaked
    # Content-Type/Content-Disposition state across calls.
    if headers is None:
        headers = {}
    if proxies is None:
        proxies = {}
    # Judge request method automatically when not given.
    if method is None:
        if data is None and json is None and files is None:
            method = "get"
        else:
            method = "post"
    # Handle body data.
    if files is None:
        # A string body is a file path: read it and advertise its name.
        if data.__class__ == str:
            if "Content-Disposition" not in headers:
                file_name = os_basename(data)
                headers["Content-Disposition"] = "attachment; filename=%s" % file_name
            data = read_file(data)
        # A bytes body gets its media type guessed.
        if data.__class__ == bytes:
            if "Content-Type" not in headers:
                headers["Content-Type"] = content_type(data)
    else:
        # Normalize every file entry to the 4-tuple requests expects:
        # (filename, data, content_type, extra_headers).
        for key, val in files.items():
            if val.__class__ == tuple:
                item_data, item_headers = val
            else:
                item_data, item_headers = val, {}
            if item_data.__class__ == str:
                if "filename" not in item_headers:
                    item_headers["filename"] = os_basename(item_data)
                item_data = read_file(item_data)
            if item_data.__class__ == bytes:
                if "Content-Type" not in item_headers:
                    item_headers["Content-Type"] = content_type(item_data)
            files[key] = item_headers.get("filename", key), item_data, item_headers.get("Content-Type"), item_headers
    # Request.
    response = requests_request(
        method,
        url,
        params=params,
        data=data,
        json=json,
        files=files,
        headers=headers,
        timeout=timeout,
        proxies=proxies,
    )
    # Set encode type: requests falls back to ISO-8859-1; prefer UTF-8.
    if response.encoding == "ISO-8859-1":
        response.encoding = "utf-8"
    # Throw exception.
    # Fix: 'assert' statements are stripped under 'python -O'; raise
    # explicitly, keeping AssertionError so existing callers still catch it.
    if throw_e and response.status_code != 200:
        raise AssertionError("response code is not 200, but %s" % response.status_code)
    return response
def download(url: str, path: Optional[str] = None) -> str:
    """
    `Download` file from URL.

    Parameters
    ----------
    url : Download URL.
    path : Save path.
        - `None` : File name is `download` and auto judge file type.

    Returns
    -------
    File absolute path.
    """
    # Fetch the resource.
    response = request(url)
    content = response.content
    # Derive a save path when none was given: header filename first,
    # then a guessed extension, then a bare "download" fallback.
    if path is None:
        file_name = None
        disposition = response.headers.get("Content-Disposition", "")
        if "filename" in disposition:
            file_name = search(
                "filename=['\"]?([^\s'\"]+)",
                disposition
            )
        if file_name is None:
            guessed = filetype_guess(content)
            if guessed is not None:
                file_name = "download." + guessed.EXTENSION
        if file_name is None:
            file_name = "download"
        path = os_abspath(file_name)
    # Save.
    with open(path, "wb") as output:
        output.write(content)
    return path
from typing import List, Tuple, Optional, Union, Literal, overload
from re import search as re_search, sub as re_sub, findall as re_findall
from .rdata import unique
def search(pattern: str, text: str) -> Optional[Union[str, Tuple[Optional[str], ...]]]:
    """
    Regular `matching` text.

    Parameters
    ----------
    pattern : Regular pattern.
    text : Match text.

    Returns
    -------
    Matching result.
        - When match to and not use `group`, then return string.
        - When match to and use `group`, then return tuple with value string or None.
            If tuple length is `1`, extract and return string.
        - When no match, then return None.
    """
    match = re_search(pattern, text)
    if match is None:
        return None
    # Shape the result by how many capture groups the pattern declared.
    groups = match.groups()
    if len(groups) == 0:
        return match[0]
    if len(groups) == 1:
        return match[1]
    return groups
@overload
def search_batch(
    text: str, *patterns: str, first: Literal[True] = True
) -> Optional[Union[str, Tuple[Optional[str], ...]]]: ...

@overload
def search_batch(
    text: str, *patterns: str, first: Literal[False]
) -> List[Optional[Union[str, Tuple[Optional[str], ...]]]]: ...

def search_batch(text: str, *patterns: str, first: bool = True) -> Union[
    Optional[Union[str, Tuple[Optional[str], ...]]],
    List[Optional[Union[str, Tuple[Optional[str], ...]]]]
]:
    """
    `Batch` regular `search` text.

    Parameters
    ----------
    text : Match text.
    patterns : Regular pattern.
    first : Whether return first successful match.

    Returns
    -------
    Matching result.
        - When `first` is True : First non-None result of `search`,
            or None when no pattern matches.
        - When `first` is False : List with one `search` result per pattern.
    """
    # Fix: the original overloads omitted the 'text' and 'patterns'
    # parameters, so they could never match a real call signature.
    # Return first successful result.
    if first:
        for pattern in patterns:
            result = search(pattern, text)
            if result is not None:
                return result
        # Fix: previously an empty 'patterns' raised NameError here;
        # "no match" now uniformly returns None.
        return None
    # Return all results, one per pattern.
    else:
        return [search(pattern, text) for pattern in patterns]
def sub_batch(text: str, *patterns: Tuple[str, str]) -> str:
    """
    `Batch` regular `replace` text.

    Parameters
    ----------
    text : Match text.
    patterns : Regular pattern and replace text.

    Returns
    -------
    Replaced result.
    """
    # Apply each (pattern, replacement) pair in order; later pairs see
    # the output of earlier ones.
    result = text
    for pattern, replacement in patterns:
        result = re_sub(pattern, replacement, result)
    return result
def findall_batch(text: str, *patterns: str) -> List[str]:
    """
    `Batch` regular `find all` text.

    Parameters
    ----------
    text : Match text.
    patterns : Regular pattern.

    Returns
    -------
    List of find results, de-duplicated.
    """
    # Fix: the return annotation said 'str', but a list is returned.
    # Collect every match of every pattern, in pattern order.
    texts = [
        string
        for pattern in patterns
        for string in re_findall(pattern, text)
    ]
    # De-duplicate (order-preserving helper from .rdata).
    texts = unique(texts)
    return texts
from typing import Callable, Any, Generator, Optional
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor, as_completed
from .rwrap import update_tqdm
def threads(
    func: Callable,
    *args: Any,
    max_workers: Optional[int] = None,
    thread_name: Optional[str] = None,
    timeout: Optional[int] = None,
    to_tqdm: bool = True,
    **kwargs: Any
) -> Generator:
    """
    Concurrent `multi tasks` using thread pool.

    Parameters
    ----------
    func : Task function.
    args : Position parameter of input parameter task function.
        Each element must be a sized iterable; length-1 elements are
        broadcast to the common task count.
    max_workers : Maximum number of threads.
        - `None` : Number of CPU + 4, 32 maximum.
        - `int` : Use this value, no maximum limit.
    thread_name : Thread name prefix and progress bar description.
        - `None` : Use the task function name.
        - `str` : Use this value.
    timeout : Call generator maximum waiting second, overtime throw exception.
        - `None` : Unlimited.
        - `int` : Use this value.
    to_tqdm : Whether print progress bar.
    kwargs : Keyword parameter of input parameter task function.
        Values must be sized iterables, broadcast like `args`.

    Returns
    -------
    Generator with multi Future object, object from concurrent package.
        When called, it will block until all tasks are completed.
        When `for` syntax it, the task that complete first return first.

    Examples
    --------
    Get value.
    >>> results = [future.result() for future in Generator]
    """
    # Handle parameters.
    if thread_name is None:
        thread_name = func.__name__
    # Task count is the common length of the non-broadcast parameters.
    # NOTE(review): min() raises ValueError when every positional parameter
    # has length 1, or when no positional parameters are given -- confirm
    # whether that is intended.
    params_lens = {len(param) for param in args}
    params_lens -= {1}
    min_param_len = min(params_lens)
    # Broadcast length-1 positional parameters to the task count.
    args = [
        list(param) * min_param_len
        if len(param) == 1
        else param
        for param in args
    ]
    # Expand keyword parameters into per-task [key, value] pairs,
    # broadcasting length-1 values.
    kwargs = [
        [[key, val]] * min_param_len
        if len(val) == 1
        else [
            [key, param]
            for param in val
        ]
        for key, val in kwargs.items()
    ]
    # Transpose so each task gets one (args, kwargs) pair.
    if args:
        args = zip(*args)
    else:
        args = [[]] * min_param_len
    if kwargs:
        kwargs = zip(*kwargs)
        kwargs = [dict(param) for param in kwargs]
    else:
        kwargs = [{}] * min_param_len
    params = zip(args, kwargs)
    # Create thread pool.
    thread_pool = ThreadPoolExecutor(max_workers, thread_name)
    # Add progress bar: wrap the task so each completion advances the bar.
    if to_tqdm:
        tqdm_desc = "ThreadPool " + thread_name
        obj_tqdm = tqdm(desc=tqdm_desc, total=min_param_len)
        func = update_tqdm(func, obj_tqdm, _execute=False)
    # Start thread pool.
    tasks = [thread_pool.submit(func, *args, **kwargs) for args, kwargs in params]
    # Return generator that yields futures in completion order.
    obj_tasks = as_completed(tasks, timeout)
    return obj_tasks
from typing import Any, List, Tuple, Iterable, Callable, Type, Literal, Optional, Union
from types import TracebackType
from sys import exc_info
from traceback import format_exc
from warnings import warn as warnings_warn
from varname import argname
def warn(*warn_infos: Any, warn_type: Type[BaseException] = UserWarning, stacklevel: int = 3) -> None:
    """
    Throw `warning`.

    Parameters
    ----------
    warn_infos : Warn informations.
    warn_type : Warn type.
    stacklevel : Warning code location, number of recursions up the code level.
    """
    # Build the message: default text, a single value (kept verbatim when
    # already a string), or the repr of the whole tuple.
    count = len(warn_infos)
    if count == 0:
        message = "Warning!"
    elif count == 1:
        first = warn_infos[0]
        message = first if first.__class__ == str else str(first)
    else:
        message = str(warn_infos)
    # Throw warning.
    warnings_warn(message, warn_type, stacklevel)
def exc(report: Optional[str] = None) -> Tuple[str, BaseException, Any, TracebackType]:
    """
    Return exception information and print, must used in `except` syntax.

    Parameters
    ----------
    report : Print report title.
        - `None` : Not print.
        - `str` : Print and use this title.

    Returns
    -------
    Exception information text and exception type and exception object and exception position object.
    """
    # Capture the active exception: formatted traceback text plus the
    # (type, value, traceback) triple from sys.exc_info().
    report_text = format_exc().strip()
    exception = (report_text, *exc_info())
    # Optionally pretty-print the traceback under the given title.
    if report is not None:
        from .rtext import rprint
        rprint(report_text, title=report, frame="half")
    return exception
def check_target(value: Any, *targets: Union[Any, Literal["_iterable"]], check_element: bool = False) -> None:
    """
    Check the content or type of the value, when check fail, then throw `exception`.

    Parameters
    ----------
    value : Check object.
    targets : Correct target, can be type.
        - `Any` : Check whether it is the target.
        - `Literal['_iterable']` : Check whether it can be iterable.
    check_element : Whether check element in value.

    Raises
    ------
    ValueError : When an element matches none of the targets.
    """
    # Handle parameters.
    if check_element:
        values = value
    else:
        values = [value]
    # Check.
    for element in values:
        ## Iterable target.
        if "_iterable" in targets and is_iterable(element):
            continue
        ## Type target.
        if element.__class__ in targets:
            continue
        ## Identity target.
        ## Fix: the original 'continue' sat inside an inner for-loop and
        ## only skipped one target, so identity matches still fell
        ## through to the raise below.
        if any(element is target for target in targets):
            continue
        ## Throw exception.
        var_name = get_name(value)
        if var_name is not None:
            var_name = " '%s'" % var_name
        else:
            var_name = ""
        correct_targets_str = ", ".join([repr(target) for target in targets])
        if check_element:
            raise ValueError(
                "parameter%s the elements content or type must in [%s], now: %s" % (
                    var_name,
                    correct_targets_str,
                    repr(value)
                )
            )
        else:
            raise ValueError(
                "parameter%s the content or type must in [%s], now: %s" % (
                    var_name,
                    correct_targets_str,
                    repr(value)
                )
            )
def check_least_one(*values: Any) -> None:
    """
    Check that at least one of multiple values is not `None`, when check fail, then throw `exception`.

    Parameters
    ----------
    values : Check values.
    """
    # Pass as soon as any value is filled in.
    if any(value is not None for value in values):
        return
    # Throw exception, naming the offending parameters when resolvable.
    names = get_name(values)
    if names is None:
        names_text = ""
    else:
        ordered = sorted(set(names), key=names.index)
        names_text = " " + " and ".join("'%s'" % name for name in ordered)
    raise ValueError("at least one of parameters%s is not None" % names_text)
def check_most_one(*values: Any) -> None:
    """
    Check that at most one of multiple values is not `None`, when check fail, then throw `exception`.

    Parameters
    ----------
    values : Check values.
    """
    # Count values that are filled in (the original misnamed this
    # counter 'none_count').
    filled_count = sum(1 for value in values if value is not None)
    if filled_count <= 1:
        return
    # Throw exception, naming the offending parameters when resolvable.
    names = get_name(values)
    if names is None:
        names_text = ""
    else:
        ordered = sorted(set(names), key=names.index)
        names_text = " " + " and ".join("'%s'" % name for name in ordered)
    raise ValueError("at most one of parameters%s is not None" % names_text)
def is_iterable(obj: Any, exclude_types: Iterable[Type] = [str, bytes]) -> bool:
    """
    Judge whether it is `iterable`.

    Parameters
    ----------
    obj : Judge object.
    exclude_types : Non iterative types.

    Returns
    -------
    Judgment result.
    """
    # Types listed as exclusions are never treated as iterable.
    if obj.__class__ in exclude_types:
        return False
    # An object is iterable when it exposes the '__iter__' protocol.
    try:
        attributes = obj.__dir__()
    except TypeError:
        return False
    return "__iter__" in attributes
def is_table(obj: Any, check_fields: bool = True) -> bool:
    """
    Judge whether it is `List[Dict]` table format and keys and keys sort of the Dict are the same.

    Parameters
    ----------
    obj : Judge object.
    check_fields : Do you want to check the keys and keys sort of the Dict are the same.

    Returns
    -------
    Judgment result.
    """
    # Must be a list whose every row is a dict.
    if obj.__class__ != list:
        return False
    for row in obj:
        if row.__class__ != dict:
            return False
    # Optionally require every row to share one key signature
    # (same keys, same order).
    if check_fields:
        signatures = {
            ":".join(str(key) for key in row.keys())
            for row in obj
        }
        if len(signatures) != 1:
            return False
    return True
def is_number_str(string: str) -> bool:
    """
    Judge whether it is `number` string.
    Only integer strings count; anything int() rejects is not a number.

    Parameters
    ----------
    string : String.

    Returns
    -------
    Judgment result.
    """
    # Judge by attempting the conversion.
    try:
        int(string)
        return True
    except ValueError:
        return False
def get_first_notnull(
    *values: Any,
    default: Union[None, Any, Literal["exception"]] = None,
    null_values: List = [None]) -> Any:
    """
    Get the first value that is not `None`.

    Parameters
    ----------
    values : Check values.
    default : When all are null, then return this is value, or throw exception.
        - `Any` : Return this is value.
        - `Literal['exception']` : Throw `exception`.
    null_values : Range of null values.

    Returns
    -------
    Return first not null value, when all are `None`, then return default value.
    """
    # Return the first value outside the null range.
    for candidate in values:
        if candidate not in null_values:
            return candidate
    # All values were null: optionally raise, naming the parameters.
    if default == "exception":
        names = get_name(values)
        if names is None:
            names_text = ""
        else:
            ordered = sorted(set(names), key=names.index)
            names_text = " " + " and ".join("'%s'" % name for name in ordered)
        raise ValueError("at least one of parameters%s is not None" % names_text)
    return default
def to_type(obj: Any, to_type: Type, method: Optional[Callable] = None) -> Any:
    """
    Convert object `type`.

    Parameters
    ----------
    obj : Convert object.
    to_type : Target type.
    method : Convert method.
        - `None` : Use value of parameter `to_type`.
        - `Callable` : Use this method.

    Returns
    -------
    Converted object.
    """
    # Already the target type: return unchanged.
    if obj.__class__ == to_type:
        return obj
    # Otherwise convert with the explicit method, or the type itself.
    converter = to_type if method is None else method
    return converter(obj)
def get_name(obj: Any, frame: int = 2) -> Optional[Union[str, Tuple[str, ...]]]:
    """
    Get object `name`.

    Parameters
    ----------
    obj : Object.
    frame : Number of code to upper level.

    Returns
    -------
    Object name or None.
    """
    # Get name using built in method.
    try:
        name = obj.__name__
    except AttributeError:
        # Get name using module method: resolve the variable name the
        # caller passed the value under, walking 'frame' levels up.
        name = "obj"
        try:
            # NOTE(review): 'argname' is from the third-party 'varname'
            # package; assumes each stack level forwards the value under a
            # resolvable variable name -- confirm behaviour for literals.
            for _frame in range(1, frame + 1):
                name = argname(name, frame=_frame)
            # argname may return a sequence of names; an all-empty
            # sequence is treated as "no name".
            if name.__class__ != str:
                if "".join(name) == "":
                    name = None
        except:
            # Any resolution failure means the name is unavailable.
            name = None
    return name
from typing import Any, Dict, Literal, Optional, Union, overload
from pandas import DataFrame, concat as pd_concat
from time import time as time_time, sleep as time_sleep
from datetime import (
datetime as datetime_datetime,
date as datetime_date,
time as datetime_time,
timedelta as datetime_timedelta
)
from .rbase import check_target, is_number_str
from .rother import randn
from .rtext import rprint
@overload
def now(
    format: Literal["datetime", "date", "time", "timestamp", "datetime_str", "date_str", "time_str"] = "datetime_str"
) -> Union[datetime_datetime, datetime_date, datetime_time, int, str]: ...

@overload
def now(format: Literal["datetime"]) -> datetime_datetime: ...

@overload
def now(format: Literal["date"]) -> datetime_date: ...

@overload
def now(format: Literal["time"]) -> datetime_time: ...

@overload
def now(format: Literal["datetime_str", "date_str", "time_str"]) -> str: ...

@overload
def now(format: Literal["timestamp"]) -> int: ...

def now(
    format: Literal["datetime", "date", "time", "datetime_str", "date_str", "time_str", "timestamp"] = "datetime_str"
) -> Union[datetime_datetime, datetime_date, datetime_time, str, int]:
    """
    Get `current` time string or integer or object.

    Parameters
    ----------
    format : Format type.
        - `Literal['datetime']` : Return datetime object of datetime package.
        - `Literal['date']` : Return date object of datetime package.
        - `Literal['time']` : Return time object of datetime package.
        - `Literal['datetime_str']` : Return string in format `%Y-%m-%d %H:%M:%S`.
        - `Literal['date_str']` : Return string in format `%Y-%m-%d`.
        - `Literal['time_str']` : Return string in format `%H:%M:%S`.
        - `Literal['timestamp']` : Return time stamp in milliseconds.

    Returns
    -------
    Time string or object of datetime package.
    """
    # Fix: one overload was annotated with the misspelled literal
    # "datatime", so format="datetime" never matched a specific overload.
    if format == "datetime":
        return datetime_datetime.now()
    elif format == "date":
        return datetime_datetime.now().date()
    elif format == "time":
        return datetime_datetime.now().time()
    elif format == "datetime_str":
        return datetime_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    elif format == "date_str":
        return datetime_datetime.now().strftime("%Y-%m-%d")
    elif format == "time_str":
        return datetime_datetime.now().strftime("%H:%M:%S")
    elif format == "timestamp":
        return int(time_time() * 1000)
@overload
def time2str(
    object_: Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any],
    format: Optional[str] = None,
    throw_e: bool = False
) -> Union[str, Any]: ...

@overload
def time2str(object_: Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int]) -> str: ...

@overload
def time2str(object_: Any) -> Any: ...

def time2str(
    object_: Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any],
    format: Optional[str] = None,
    throw_e: bool = False
) -> Union[str, Any]:
    """
    Format time object of package `datetime` to string.

    Parameters
    ----------
    object_ : Object of `datetime` package or int.
    format : Format string.
        - `None` : Automatic by type.
            * Parameter `object_` is datetime_datetime : Is `%Y-%m-%d %H:%M:%S`.
            * Parameter `object_` is datetime_date : Is `%Y-%m-%d`.
            * Parameter `object_` is datetime_time : Is `%H:%M:%S`.
            * Parameter `object_` is datetime_timedelta : Is f`{days} %H:%M:%S`.
            * Parameter `object_` is time stamp : Is `%Y-%m-%d %H:%M:%S`.
        - `str` : Format by this value.
    throw_e : Whether throw exception, when parameter `object_` value error, otherwise return original value.

    Returns
    -------
    String after format or original value.
    """
    # Check parameters: optionally reject values that are not time-like.
    if throw_e:
        check_target(object_, datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int)
    # Convert to time string.
    ## From datetime object: default keeps the "YYYY-MM-DD HH:MM:SS" prefix.
    if object_.__class__ == datetime_datetime:
        if format is None:
            string = str(object_)[:19]
        else:
            string = object_.strftime(format)
    ## From date object.
    elif object_.__class__ == datetime_date:
        if format is None:
            string = str(object_)[:10]
        else:
            string = object_.strftime(format)
    ## From time object.
    elif object_.__class__ == datetime_time:
        if format is None:
            string = str(object_)[:8]
        else:
            string = object_.strftime(format)
    ## From timedelta object: default renders "days HH:MM:SS".
    elif object_.__class__ == datetime_timedelta:
        if format is None:
            # str(timedelta) looks like "2 days, 3:04:05" or "3:04:05".
            string = str(object_)
            if "day" in string:
                day, char, string = string.split(" ")
            else:
                day = "0"
            # Zero-pad a single-digit hour.
            if string[1] == ":":
                string = "0" + string
            string = "%s %s" % (day, string[:8])
        else:
            # NOTE(review): only '.microseconds' is read here, dropping the
            # days and whole-seconds components, and fromtimestamp applies
            # the local timezone -- this looks wrong for any delta of one
            # second or more; confirm intended behaviour.
            seconds = object_.microseconds / 1000_000
            datetime_obj = datetime_datetime.fromtimestamp(seconds)
            string = datetime_obj.strftime(format)
    ## From int object: treated as a (possibly sub-second precision) timestamp.
    elif object_.__class__ == int:
        int_len = len(str(object_))
        # More than 10 digits: scale down to whole seconds (e.g. ms -> s).
        if int_len > 10:
            divisor = 10 ** (int_len - 10)
            seconds = object_ / divisor
        else:
            seconds = object_
        datetime_obj = datetime_datetime.fromtimestamp(seconds)
        if format is None:
            format = "%Y-%m-%d %H:%M:%S"
        string = datetime_obj.strftime(format)
    ## From other object: returned unchanged.
    else:
        return object_
    return string
@overload
def str2time(
    string: Union[str, Any],
    type_: Optional[Literal["datetime", "date", "time", "timedelta", "timestamp"]] = None,
    format: Optional[str] = None,
    throw_e: bool = False
) -> Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any]: ...

@overload
def str2time(
    string: Union[str, Any], type_: Literal["datetime"], format: Optional[str] = None, throw_e: bool = False
) -> Union[datetime_datetime, Any]: ...

@overload
def str2time(
    string: Union[str, Any], type_: Literal["date"], format: Optional[str] = None, throw_e: bool = False
) -> Union[datetime_date, Any]: ...

@overload
def str2time(
    string: Union[str, Any], type_: Literal["time"], format: Optional[str] = None, throw_e: bool = False
) -> Union[datetime_time, Any]: ...

@overload
def str2time(
    string: Union[str, Any], type_: Literal["timedelta"], format: Optional[str] = None, throw_e: bool = False
) -> Union[datetime_timedelta, Any]: ...

@overload
def str2time(
    string: Union[str, Any], type_: Literal["timestamp"], format: Optional[str] = None, throw_e: bool = False
) -> Union[int, Any]: ...

def str2time(
    string: Union[str, Any],
    type_: Optional[Literal["datetime", "date", "time", "timedelta", "timestamp"]] = None,
    format: Optional[str] = None,
    throw_e: bool = False
) -> Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any]:
    """
    Format string to time object of package `datetime`.

    Parameters
    ----------
    string : Time string.
    type_ : Format type.
        - `None` : Automatic judgment.
        - `Literal['datetime']` : Return datetime object of package datetime.
        - `Literal['date']` : Return date object of package datetime.
        - `Literal['time']` : Return time object of package datetime.
        - `Literal['timedelta']` : Return timedelta object of package datetime.
        - `Literal['timestamp']` : Return time stamp in milliseconds.
    format : Format string.
        - `None` : Default format method by type, or automatic judgment.
        - `str` : Format by this value.
    throw_e : Whether throw exception, when parameter `string` cannot be parsed, otherwise return original value.

    Returns
    -------
    Time object of datetime package or time stamp or original value.

    Raises
    ------
    ValueError : When parsing fails and `throw_e` is True.
    """
    # Non-string input is returned unchanged.
    if string.__class__ != str:
        return string
    # Fix: remember the untouched input so failure paths return the original
    # value, not a fragment left over from the timedelta split below.
    original_string = string
    # Get time format by automatic judgment.
    if type_ is None:
        str_len = len(string)
        if "年" == string[4:5]:
            if str_len > 11:
                format = "%Y年%m月%d日 %H时%M分%S秒"
                type_ = "datetime"
            else:
                format = "%Y年%m月%d日"
                type_ = "date"
        elif "时" in string[1:3]:
            format = "%H时%M分%S秒"
            type_ = "time"
        elif " " in string and "-" not in string:
            format = "%H:%M:%S"
            type_ = "timedelta"
        elif str_len == 19:
            format = "%Y-%m-%d %H:%M:%S"
            type_ = "datetime"
        elif str_len == 14:
            format = "%Y%m%d%H%M%S"
            type_ = "datetime"
        elif str_len == 10:
            format = "%Y-%m-%d"
            type_ = "date"
        elif str_len == 8:
            if string[2] == ":":
                format = "%H:%M:%S"
                type_ = "time"
            else:
                format = "%Y%m%d"
                type_ = "date"
        elif str_len == 6:
            format = "%H%M%S"
            type_ = "time"
        elif str_len == 4:
            format = "%Y"
            type_ = "date"
        else:
            return string
    # Get time format by parameter `type_`.
    else:
        if format is None:
            format_dir = {
                "datetime": "%Y-%m-%d %H:%M:%S",
                "date": "%Y-%m-%d",
                "time": "%H:%M:%S",
                "timestamp": "%Y-%m-%d %H:%M:%S",
                "timedelta": "%H:%M:%S"
            }
            format = format_dir[type_]
    # Additional processing timedelta type: split off the day count.
    day = 0
    if type_ == "timedelta":
        if " " in string:
            strings = string.split(" ")
            day_str, string = strings[0], strings[-1]
        else:
            # Fix: the original left 'day_str' unassigned here, so a
            # day-less timedelta string raised NameError below.
            day_str = "0"
        try:
            day = int(day_str)
        except ValueError:
            if throw_e:
                raise ValueError("failed to format string as time object")
            return original_string
    # Convert to time type.
    try:
        time_obj = datetime_datetime.strptime(string, format)
    except ValueError:
        if throw_e:
            raise ValueError("failed to format string as time object")
        return original_string
    if type_ == "date":
        time_obj = time_obj.date()
    elif type_ == "time":
        time_obj = time_obj.time()
    elif type_ == "timestamp":
        time_obj = int(time_obj.timestamp() * 1000)
    elif type_ == "timedelta":
        # Fix: the original only read '.second', silently dropping the
        # hour and minute components of the parsed string.
        seconds = (
            time_obj.hour * 3600
            + time_obj.minute * 60
            + time_obj.second
            + day * 86400
        )
        time_obj = datetime_timedelta(seconds=seconds)
    return time_obj
def sleep(*thresholds: Union[int, float], precision: Optional[int] = None) -> Union[int, float]:
    """
    `Sleep` random seconds.

    Parameters
    ----------
    thresholds : Low and high thresholds of random range, range contains thresholds.
        - When `length is 0`, then low and high thresholds is `0` and `10`.
        - When `length is 1`, then sleep this value.
        - When `length is 2`, then low and high thresholds is `thresholds[0]` and `thresholds[1]`.
    precision : Precision of random range, that is maximum decimal digits of sleep seconds.
        - `None` : Set to Maximum decimal digits of element of parameter `thresholds`.
        - `int` : Set to this value.

    Returns
    -------
    Random seconds.
        - When parameters `precision` is `0`, then return int.
        - When parameters `precision` is `greater than 0`, then return float.
    """
    # Derive the sleep duration from the number of thresholds given.
    count = len(thresholds)
    if count == 0:
        second = randn(0, 10, precision=precision)
    elif count == 1:
        second = thresholds[0]
    elif count == 2:
        second = randn(thresholds[0], thresholds[1], precision=precision)
    else:
        raise ValueError("number of parameter 'thresholds' must is 0 or 1 or 2")
    # Sleep.
    time_sleep(second)
    return second
class RTimeMark():
    """
    Rey`s date time `mark` type.
    Stores one information dictionary per `mark` call in `self.record`.
    """

    def __init__(self) -> None:
        """
        Mark now time.
        """
        # Marking: record the construction moment as the first mark.
        self.mark()

    def mark(self) -> Dict[
        Literal["index", "timestamp", "datetime", "datetime_str", "interval_timestamp", "interval_timedelta", "interval_timedelta_str"],
        Optional[Union[str, float, datetime_datetime, datetime_timedelta]]
    ]:
        """
        `Mark` now time and return mark time information.

        Returns
        -------
        Mark time information.
        """
        # Compatible with first marking: create the record list lazily so
        # 'mark' works even when called before __init__ finished.
        if "record" not in self.__dir__():
            self.record = []
        # Get parameters for the new mark.
        record_len = len(self.record)
        mark_info = {
            "index": record_len,
            "timestamp": now("timestamp"),
            "datetime": now("datetime"),
            "datetime_str": now(),
        }
        # Marking.
        ## First mark has no predecessor, so all intervals are undefined.
        if record_len == 0:
            mark_info["interval_timestamp"] = None
            mark_info["interval_timedelta"] = None
            mark_info["interval_timedelta_str"] = None
        ## Non first: intervals are measured against the previous mark.
        else:
            last_datetime = self.record[-1]["datetime"]
            last_timestamp = self.record[-1]["timestamp"]
            mark_info["interval_timestamp"] = mark_info["timestamp"] - last_timestamp
            mark_info["interval_timedelta"] = mark_info["datetime"] - last_datetime
            mark_info["interval_timedelta_str"] = time2str(mark_info["interval_timedelta"])
        self.record.append(mark_info)
        return mark_info

    def report(self) -> DataFrame:
        """
        `Print` and return mark time information.

        Returns
        -------
        DataFrame object of pandas package with mark time information.
        """
        # Keep only the printable columns of each mark.
        data = [
            {
                "timestamp": row["timestamp"],
                "datetime": row["datetime_str"],
                "interval": row["interval_timedelta_str"]
            }
            for row in self.record
        ]
        # Generate report: append a summary row with the total elapsed time.
        report_df = DataFrame(data, dtype=str)
        interval_timedelta = self.record[-1]["datetime"] - self.record[0]["datetime"]
        interval = time2str(interval_timedelta)
        sum_df = DataFrame({"interval": interval}, index = ["sum"])
        report_df = pd_concat([report_df, sum_df])
        report_df.fillna("-", inplace=True)
        # Report via the project's pretty printer.
        title = "Time Mark"
        rprint(report_df, title=title)
        return report_df
def sqlalchemy_add_result_more_fetch():
    """
    `Modify` package `sqlalchemy`, add more fetch methods to CursorResult object.

    Returns
    -------
    RResult : CursorResult subclass carrying the added fetch methods,
        intended for use as a return-type annotation.
    """
    from sqlalchemy.engine.cursor import CursorResult
    from .rdata import to_table, to_dict, to_df, to_json, to_sql, to_html, to_csv, to_excel
    # Monkey-patch the converters onto CursorResult so every query result
    # gains the extra fetch_* methods.
    # NOTE(review): assumes each to_* helper accepts the result object as
    # its first argument so it can act as a bound method -- confirm in .rdata.
    # Fetch result as table in List[Dict] format.
    CursorResult.fetch_table = to_table
    # Fetch result as dictionary.
    CursorResult.fetch_dict = to_dict
    # Fetch result as DataFrame object.
    CursorResult.fetch_df = to_df
    # Fetch result as JSON string.
    CursorResult.fetch_json = to_json
    # Fetch result as SQL string.
    CursorResult.fetch_sql = to_sql
    # Fetch result as HTML string.
    CursorResult.fetch_html = to_html
    # Fetch result as save csv format file.
    CursorResult.fetch_csv = to_csv
    # Fetch result as save excel file.
    CursorResult.fetch_excel = to_excel
    # Update annotations.
    class RResult(CursorResult):
        """
        `Update` based on `CursorResult` object, for annotation return value.
        """
        # Inherit document.
        __doc__ = CursorResult.__doc__
        # Add more fetch methods.
        fetch_table = to_table
        fetch_dict = to_dict
        fetch_df = to_df
        fetch_json = to_json
        fetch_sql = to_sql
        fetch_html = to_html
        fetch_csv = to_csv
        fetch_excel = to_excel
    return RResult
def sqlalchemy_support_row_index_by_field():
    """
    `Modify` package `sqlalchemy`, support Row object of package sqlalchemy index by field name.
    """
    from typing import Any, Union, Sequence, overload
    from sqlalchemy.engine.row import Row

    # Typed overload stubs for the replacement method.
    @overload
    def __getitem__(self, index: Union[str, int, slice]) -> Union[Any, Sequence[Any]]: ...

    @overload
    def __getitem__(self, index: Union[str, int]) -> Any: ...

    @overload
    def __getitem__(self, index: slice) -> Sequence[Any]: ...

    def __getitem__(self, index: Union[str, int, slice]) -> Union[Any, Sequence[Any]]:
        """
        `Index` row value.

        Parameters
        ----------
        index : Field name or subscript or slice.

        Returns
        -------
        Index result.
        """
        # Field-name access goes through the key mapping,
        # positional / slice access through the raw data tuple.
        if index.__class__ == str:
            return self._mapping[index]
        return self._data[index]

    # Install the replacement on the Row type.
    Row.__getitem__ = __getitem__
def pprint_modify_format_width_judgment() -> None:
    """
    Based on module `pprint.pformat`, `modify` the chinese width judgment.

    Replaces `PrettyPrinter._format` so that line-wrapping decisions use the
    display width of the text (wide CJK characters counted via urwid) instead
    of the character count.
    """
    from pprint import PrettyPrinter, _recursion
    from urwid import old_str_util

    # Chinese width can be determined.
    def get_width(text: str) -> int:
        """
        `Get` text `display width`.

        Parameters
        ----------
        text : Text.

        Returns
        -------
        Text display width.
        """
        # Get width.
        total_width = 0
        for char in text:
            char_unicode = ord(char)
            # urwid returns the terminal column width of the code point
            # (2 for wide CJK characters, 1 for most others).
            char_width = old_str_util.get_width(char_unicode)
            total_width += char_width
        return total_width

    # New method.
    # NOTE(review): mirrors the stdlib `PrettyPrinter._format` structure with
    # `len(rep)` replaced by `get_width(rep)` — relies on the private
    # attributes `_repr`, `_width`, `_dispatch`, `_pprint_dict`; confirm
    # against the target Python version's pprint implementation.
    def _format(_self, object, stream, indent, allowance, context, level):
        objid = id(object)
        if objid in context:
            # Self-referential structure: emit the recursion marker and bail.
            stream.write(_recursion(object))
            _self._recursive = True
            _self._readable = False
            return
        rep = _self._repr(object, context, level)
        max_width = _self._width - indent - allowance
        # Display width, not character count, decides whether to wrap.
        width = get_width(rep)
        if width > max_width:
            p = _self._dispatch.get(type(object).__repr__, None)
            if p is not None:
                context[objid] = 1
                p(_self, object, stream, indent, allowance, context, level + 1)
                del context[objid]
                return
            elif isinstance(object, dict):
                context[objid] = 1
                _self._pprint_dict(object, stream, indent, allowance,
                                   context, level + 1)
                del context[objid]
                return
        stream.write(rep)

    # Modify the chinese width judgment.
    PrettyPrinter._format = _format
from typing import Any, Tuple, Callable, Optional, Union, Literal, overload
from tqdm import tqdm as tqdm_tqdm
from threading import Thread
from functools import wraps as functools_wraps
from .rbase import exc
from .rtext import rprint
from .rdatetime import RTimeMark, now
from . import roption
def wrap_frame(decorator: Callable) -> Callable:
    """
    Decorative `frame`.

    Parameters
    ----------
    decorator : Decorator function.

    Returns
    -------
    Decorator after decoration.

    Examples
    --------
    Decoration function method one.

    >>> @wrap_func
    >>> def func(): ...
    >>> result = func(param_a, param_b, param_c=1, param_d=2)

    Decoration function method two.

    >>> def func(): ...
    >>> result = wrap_func(func, param_a, param_b, param_c=1, param_d=2)

    Decoration function method three.

    >>> def func(): ...
    >>> result = wrap_func(func, _execute=True)

    Decoration function method four.

    >>> def func(): ...
    >>> func = wrap_func(func)
    >>> result = func(param_a, param_b, param_c=1, param_d=2)

    Decoration function method five.

    >>> def func(): ...
    >>> func = wrap_func(func, param_a, param_c=1, _execute=False)
    >>> result = func(param_b, param_d=2)
    """
    # Decorate Decorator.
    @functools_wraps(decorator)
    def wrap(func: Callable, *args: Any, _execute: Optional[bool] = None, **kwargs: Any) -> Union[Callable, Any]:
        """
        Decorative `shell`.

        Parameters
        ----------
        func : Function to decorate or execute.
        args : Position parameter of input parameter decorated function.
        _execute : Whether execute function, otherwise decorate function.
            - `None` : When parameter `args` or `kwargs` have values, then True, otherwise False.
            - `bool` : Use this value.
        kwargs : Keyword parameter of input parameter decorated function.

        Returns
        -------
        Function after decoration or return of function.
        """
        # Handle parameters: default to executing when call arguments were supplied.
        if _execute is None:
            _execute = bool(args or kwargs)

        # Direct execution.
        if _execute:
            return decorator(func, *args, **kwargs)

        # Decorate function: arguments bound now are merged with call-time ones.
        @functools_wraps(func)
        def wrap_sub(*_args: object, **_kwargs: object) -> object:
            """
            Decorative sub shell.
            """
            return decorator(func, *args, *_args, **kwargs, **_kwargs)

        return wrap_sub

    return wrap
def wraps(*wrap_funcs: Callable) -> Callable:
    """
    `Batch` decorate.

    Parameters
    ----------
    wrap_funcs : Decorator function.

    Returns
    -------
    Function after decoration.

    Examples
    --------
    Decoration function.

    >>> @wraps(print_funtime, state_thread)
    >>> def func(): ...
    >>> result = func()

    Same up and down

    >>> def func(): ...
    >>> func = print_funtime(func)
    >>> func = state_thread(func)
    >>> result = func()
    """
    # One decoration layer: calling the wrapper executes `wrap_func` on `func`.
    # Binding both names as closure parameters here fixes the original
    # late-binding bug where every layer saw only the last loop values and
    # therefore only the last decorator was ever applied.
    def _layer(wrap_func: Callable, func: Callable) -> Callable:
        @functools_wraps(func)
        def wrap_sub(*args: object, **kwargs: object) -> object:
            """
            Decorative sub shell.
            """
            # `_execute=True` forces wrap_frame-style decorators to run now.
            return wrap_func(func, *args, _execute=True, **kwargs)
        return wrap_sub

    def wrap(func: Callable) -> Callable:
        """
        Decorative shell.
        """
        # Sequential decorate, in the order the decorators were given.
        for wrap_func in wrap_funcs:
            func = _layer(wrap_func, func)
        return func

    return wrap
@overload
def runtime(func: Callable, *args: Any, _return_report: bool = False, **kwargs: Any) -> Union[Any, Tuple[Any, str]]: ...

@overload
def runtime(_return_report: Literal[False]) -> Any: ...

@overload
def runtime(_return_report: Literal[True]) -> Union[Any, Tuple[Any, str]]: ...

@wrap_frame
def runtime(func: Callable, *args: Any, _return_report: bool = False, **kwargs: Any) -> Union[Any, Tuple[Any, str]]:
    """
    Print or return `runtime` report of the function.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _return_report : Whether return report, otherwise print report.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result or runtime report.
    """
    # Mark time around the function call.
    time_mark = RTimeMark()
    result = func(*args, **kwargs)
    time_mark.mark()

    # Generate report from the recorded marks.
    records = time_mark.record
    spend_seconds = records[-1]["interval_timestamp"] / 1000
    report = "Start: %s -> Spend: %ss -> End: %s" % (
        records[0]["datetime_str"],
        spend_seconds,
        records[1]["datetime_str"]
    )
    title = func.__name__

    # Return report.
    if _return_report:
        return result, report

    # Print report.
    rprint(report, title=title)
    return result
@overload
def start_thread(func: Callable, *args: Any, _daemon: bool = True, **kwargs: Any) -> Thread: ...

@wrap_frame
def start_thread(func: Callable, *args: Any, _daemon: bool = True, **kwargs: Any) -> Thread:
    """
    Function start in `thread`.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _daemon : Whether it is a daemon thread.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Thread object.
    """
    # Name the thread after the function plus the current timestamp.
    name = "%s_%d" % (func.__name__, now("timestamp"))

    # Create and start the thread.
    child = Thread(target=func, name=name, args=args, kwargs=kwargs)
    child.daemon = _daemon
    child.start()
    return child
@overload
def try_exc(func: Callable, *args: Any, **kwargs: Any) -> Optional[Any]: ...

@wrap_frame
def try_exc(func: Callable, *args: Any, **kwargs: Any) -> Optional[Any]:
    """
    Execute function with `try` syntax and print exception information.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result or no return.
    """
    # Run the function; any exception is reported instead of propagating,
    # in which case None is returned implicitly.
    try:
        return func(*args, **kwargs)
    except:
        exc(func.__name__)
@overload
def update_tqdm(
    func: Callable,
    tqdm: tqdm_tqdm,
    *args: Any,
    _desc: Optional[str] = None,
    _step: Union[int, float] = 1,
    **kwargs: Any
) -> Any: ...

@wrap_frame
def update_tqdm(
    func: Callable,
    tqdm: tqdm_tqdm,
    *args: Any,
    _desc: Optional[str] = None,
    _step: Union[int, float] = 1,
    **kwargs: Any
) -> Any:
    """
    Update progress bar `tqdm` object of `tqdm` package.

    Parameters
    ----------
    func : Function to be decorated.
    tqdm : Progress bar tqdm object.
    args : Position parameter of input parameter decorated function.
    _desc : Progress bar description.
        - `None` : no description.
        - `str` : Add description.
    _step : Progress bar step size.
        - `When` greater than 0, then forward.
        - `When` less than 0, then backward.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result.
    """
    # Optionally set the bar description before running the function.
    if _desc is not None:
        tqdm.set_description(_desc)

    # Run the wrapped function, then advance the bar by the given step.
    outcome = func(*args, **kwargs)
    tqdm.update(_step)
    return outcome
@overload
def retry(
    func: Callable,
    *args: Any,
    _report: Optional[str] = None,
    _exception: Union[BaseException, Tuple[BaseException, ...]] = BaseException,
    _try_total: int = 1,
    _try_count: int = 0,
    **kwargs: Any
) -> Any: ...

@wrap_frame
def retry(
    func: Callable,
    *args: Any,
    _report: Optional[str] = None,
    _exception: Union[BaseException, Tuple[BaseException, ...]] = BaseException,
    _try_total: int = 1,
    _try_count: int = 0,
    **kwargs: Any
) -> Any:
    """
    `Try` again.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _report : Print report title.
        - `None` : Not print.
        - `str` : Print and use this title.
    _exception : Catch exception types.
        NOTE(review): used in an `except` clause, so callers must pass
        exception *classes* (or a tuple of classes), despite the
        instance-typed annotation — confirm intent.
    _try_total : Retry total.
    _try_count : Retry count.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result.
    """
    # Try count not full.
    if _try_count < _try_total:
        ## Try.
        try:
            result = func(*args, **kwargs)
        except _exception:
            ## Report the caught exception before retrying.
            if _report is not None:
                exception_msg, _, _, _ = exc()
                rprint(
                    exception_msg,
                    "Retrying...",
                    title=_report,
                    frame="half"
                )
            ### Retry recursively with an incremented attempt counter.
            _try_count += 1
            result = retry(
                func,
                *args,
                _report=_report,
                _exception=_exception,
                _try_total=_try_total,
                _try_count=_try_count,
                **kwargs
            )
    # Try count full.
    # NOTE(review): this final attempt runs outside any try block, so its
    # exception propagates to the caller — the function is attempted at most
    # `_try_total + 1` times in total.
    else:
        result = func(*args, **kwargs)
    return result
from typing import Any, List, Tuple, Dict, Iterable, Optional, Literal, Union, ClassVar, NoReturn, overload
from re import findall
from sqlalchemy import create_engine as sqlalchemy_create_engine, text
from sqlalchemy.engine.base import Engine, Connection
from sqlalchemy.engine.cursor import CursorResult
from sqlalchemy.engine.url import URL
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.exc import OperationalError
from pandas import DataFrame
from .rbase import get_first_notnull
from .rdata import to_table
from .rmonkey import sqlalchemy_add_result_more_fetch, sqlalchemy_support_row_index_by_field
from . import roption
from .rother import str2n
from .rregular import search
from .rtext import rprint
from .rwrap import runtime, retry
# Add more fetch methods to CursorResult object.
RResult = sqlalchemy_add_result_more_fetch()
# Support Row object of package sqlalchemy index by field name.
sqlalchemy_support_row_index_by_field()
class REngine(object):
"""
Rey's database `Engine` type, based on the package `sqlalchemy`.
"""
# Values to be converted to "NULL".
null_values: ClassVar[List] = ["", " ", b"", [], (), {}, set()]
@overload
def __init__(
self,
host: Optional[str] = None,
port: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
database: Optional[str] = None,
drivername: Optional[str] = None,
pool_size: int = 5,
max_overflow: int = 10,
pool_timeout: float = 30.0,
pool_recycle: Optional[int] = None,
url: Optional[Union[str, URL]] = None,
engine: Optional[Union[Engine, Connection]] = None,
**query: str
) -> None: ...
@overload
def __init__(self, username: None, url: None, engine: None) -> NoReturn: ...
@overload
def __init__(self, password: None, url: None, engine: None) -> NoReturn: ...
@overload
def __init__(self, host: None, url: None, engine: None) -> NoReturn: ...
@overload
def __init__(self, port: None, url: None, engine: None) -> NoReturn: ...
def __init__(
self,
host: Optional[str] = None,
port: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
database: Optional[str] = None,
drivername: Optional[str] = None,
pool_size: int = 5,
max_overflow: int = 10,
pool_timeout: float = 30.0,
pool_recycle: Optional[int] = None,
url: Optional[Union[str, URL]] = None,
engine: Optional[Union[Engine, Connection]] = None,
**query: str
) -> None:
"""
Create database `Engine` object and set parameters.
Parameters
----------
host : Server host.
port : Server port.
username : Server user name.
password : Server password.
database : Database name in the server.
drivername : Database backend and driver name.
- `None` : Auto select and try.
- `str` : Use this value.
pool_size : Number of connections `keep open`.
max_overflow : Number of connections `allowed overflow`.
pool_timeout : Number of seconds `wait create` connection.
pool_recycle : Number of seconds `recycle` connection.
- `None` : Use database variable `wait_timeout` value.
- `Literal[-1]` : No recycle.
- `int` : Use this value.
url: Get parameters from server `URL`, but preferred input parameters.
Parameters include `username`, `password`, `host`, `port`, `database`, `drivername`, `query`.
engine : Use existing `Engine` or `Connection` object, and get parameters from it.
Parameters include `username`, `password`, `host`, `port`, `database`, `drivername`, `query`,
`pool_size`, `max_overflow`, `pool_timeout`, `pool_recycle`.
query : Server parameters.
"""
# From existing Engine or Connection object.
if engine is not None:
## Extract Engine object from Connection boject.
if engine.__class__ == Connection:
engine = engine.engine
## Extract parameters.
params = self.extract_from_engine(engine)
## Set.
self.drivername = params["drivername"]
self.username = params["username"]
self.password = params["password"]
self.host = params["host"]
self.port = params["port"]
self.database = params["database"]
self.query = params["query"]
self.pool_size = params["pool_size"]
self.max_overflow = params["max_overflow"]
self.pool_timeout = params["pool_timeout"]
self.pool_recycle = params["pool_recycle"]
self.engine = engine
# From parameters create.
else:
## Extract parameters from URL.
if url is not None:
params = self.extract_from_url(url)
else:
params = dict.fromkeys(
(
"drivername",
"username",
"password",
"host",
"port",
"database",
"query"
)
)
## Set parameters by priority.
self.drivername = get_first_notnull(drivername, params["drivername"])
self.username = get_first_notnull(username, params["username"], default="exception")
self.password = get_first_notnull(password, params["password"], default="exception")
self.host = get_first_notnull(host, params["host"], default="exception")
self.port = get_first_notnull(port, params["port"], default="exception")
self.database = get_first_notnull(database, params["database"])
self.query = get_first_notnull(query, params["query"], default={"charset": "utf8"}, null_values=[None, {}])
self.pool_size = pool_size
self.max_overflow = max_overflow
self.pool_timeout = pool_timeout
## Create Engine object.
### Set number of seconds recycle connection.
if pool_recycle is None:
self.pool_recycle = -1
self.engine = self.create_engine()
variables = self.get_variables()
self.pool_recycle = int(variables["wait_timeout"])
self.engine.pool._recycle = int(variables["wait_timeout"])
else:
self.pool_recycle = pool_recycle
self.engine = self.create_engine()
def extract_from_url(self, url: Union[str, URL]) -> Dict[
    Literal["drivername", "username", "password", "host", "port", "database", "query"],
    Optional[Union[str, Dict[str, str]]]
]:
    """
    Extract parameters from `URL` of string.

    Parameters
    ----------
    url : URL of string or URL object.

    Returns
    -------
    Extracted parameters.

    Raises
    ------
    ValueError
        When a string URL does not match the expected format.
    TypeError
        When `url` is neither str nor URL.
    """
    # Extract.
    ## When str object.
    if url.__class__ == str:
        # Raw strings avoid the invalid escape sequences of the originals.
        pattern = r"^([\w\+]+)://(\w+):(\w+)@(\d+\.\d+\.\d+\.\d+):(\d+)[/]?([\w/]+)?[\?]?([\w&=]+)?$"
        result = search(pattern, url)
        if result is None:
            raise ValueError("the value of parameter 'url' is incorrect")
        (
            drivername,
            username,
            password,
            host,
            port,
            database,
            query_str
        ) = result
        if query_str is not None:
            pattern = r"(\w+)=(\w+)"
            query_findall = findall(pattern, query_str)
            query = dict(query_findall)
        else:
            query = {}
    ## When URL object.
    elif url.__class__ == URL:
        drivername = url.drivername
        username = url.username
        password = url.password
        host = url.host
        port = url.port
        database = url.database
        query = dict(url.query)
    ## Unsupported type: fail fast instead of a NameError further below.
    else:
        raise TypeError("parameter 'url' must be str or URL object")
    # Generate parameters.
    params = {
        "drivername": drivername,
        "username": username,
        "password": password,
        "host": host,
        "port": port,
        "database": database,
        "query": query
    }
    return params
def extract_from_engine(self, engine: Union[Engine, Connection]) -> Dict[
    Literal[
        "drivername", "username", "password", "host", "port", "database", "query",
        "pool_size", "max_overflow", "pool_timeout", "pool_recycle"
    ],
    Optional[Union[str, Dict[str, str], float]]
]:
    """
    Extract parameters from `Engine` or `Connection` object.

    Parameters
    ----------
    engine : Engine or Connection object.

    Returns
    -------
    Extracted parameters.
    """
    ## Extract Engine object from Connection object.
    if engine.__class__ == Connection:
        engine = engine.engine

    ## Collect connection parameters from the URL and the connection pool.
    url = engine.url
    pool = engine.pool
    params = {
        "drivername": url.drivername,
        "username": url.username,
        "password": url.password,
        "host": url.host,
        "port": url.port,
        "database": url.database,
        "query": dict(url.query),
        "pool_size": pool._pool.maxsize,
        "max_overflow": pool._max_overflow,
        "pool_timeout": pool._timeout,
        "pool_recycle": pool._recycle
    }
    return params
def url(self) -> str:
    """
    Generate server `URL`.

    Returns
    -------
    Server URL.
    """
    # Generate URL.
    _url = f"{self.drivername}://{self.username}:{self.password}@{self.host}:{self.port}"

    # Add database path.
    if self.database is not None:
        _url = f"{_url}/{self.database}"

    # Add Server parameters.
    if self.query != {}:
        query = "&".join(
            "%s=%s" % (key, val)
            for key, val in self.query.items()
        )
        _url = f"{_url}?{query}"
    return _url
def create_engine(self) -> Engine:
    """
    Create database `Engine` object.

    Returns
    -------
    Engine object.

    Raises
    ------
    ModuleNotFoundError
        When no driver module for any candidate drivername is installed.
    """
    # Handle parameters: without an explicit drivername, try common MySQL drivers.
    if self.drivername is None:
        drivernames = ("mysql+mysqldb", "mysql+pymysql")
    else:
        drivernames = (self.drivername,)

    # Create Engine object with the first importable driver.
    for drivername in drivernames:
        self.drivername = drivername
        url = self.url()
        ## Try create; a missing driver module raises ModuleNotFoundError.
        try:
            engine = sqlalchemy_create_engine(
                url,
                pool_size=self.pool_size,
                max_overflow=self.max_overflow,
                pool_timeout=self.pool_timeout,
                pool_recycle=self.pool_recycle
            )
        except ModuleNotFoundError:
            pass
        else:
            return engine

    # Throw exception (message typo fixed: 'fund' -> 'found').
    drivernames_str = " and ".join(
        drivername.split("+", 1)[-1]
        for drivername in drivernames
    )
    raise ModuleNotFoundError("module %s not found" % drivernames_str)
def count_connection(self) -> Tuple[int, int]:
    """
    Count number of `keep open` and `allowed overflow` connection.

    Returns
    -------
    Number of keep open and allowed overflow connection.
    """
    # A negative overflow means the pool is not yet filled to pool_size.
    overflow = self.engine.pool._overflow
    if overflow < 0:
        return self.pool_size + overflow, 0
    return self.pool_size, overflow
def fill_data(
    self,
    data: Union[Dict, List[Dict]],
    sql: Union[str, TextClause],
) -> List[Dict]:
    """
    `Fill` missing data according to contents of `TextClause` object of package `sqlalchemy`, and filter out empty Dict.

    Parameters
    ----------
    data : Data set for filling.
    sql : SQL in method sqlalchemy.text format, or TextClause object.

    Returns
    -------
    Filled data.
    """
    # Handle parameters (isinstance also accepts subclasses).
    if isinstance(data, dict):
        data = [data]
    if isinstance(sql, TextClause):
        sql = sql.text

    # Filter out empty Dict.
    data = [param for param in data if param != {}]

    # Extract fill field names: ":name" placeholders, ignoring escaped "\:".
    # Raw string replaces the fragile escaping of the original pattern.
    pattern = r"(?<!\\):(\w+)"
    sql_keys = findall(pattern, sql)

    # Fill data: missing or null-like values become None (SQL NULL).
    for param in data:
        for key in sql_keys:
            val = param.get(key)
            if val in self.null_values:
                val = None
            param[key] = val
    return data
def get_syntax(self, sql: Union[str, TextClause]) -> str:
    """
    Extract `syntax` type form SQL.

    Parameters
    ----------
    sql : SQL text or TextClause object.

    Returns
    -------
    SQL syntax type.
    """
    # Handle parameters.
    if sql.__class__ == TextClause:
        sql = sql.text

    # The first word of the statement, upper-cased, is the syntax type.
    first_word = sql.split(maxsplit=1)[0]
    return first_word.upper()
def executor(
    self,
    connection: Connection,
    sql: TextClause,
    data: List[Dict],
    report: bool
) -> RResult:
    """
    `SQL` executor.

    Parameters
    ----------
    connection : Connection object.
    sql : TextClause object.
    data : Data set for filling.
    report : Whether report SQL execute information.

    Returns
    -------
    CursorResult object of alsqlchemy package.
    """
    # When REngine.
    if self.__class__ == REngine:
        ## Create Transaction object; the context manager commits on normal
        ## exit and rolls back on exception.
        with connection.begin():
            ## Execute.
            ### Report: time the execution, then print runtime, row count,
            ### SQL and (when non-empty) the fill data.
            if report:
                result, report_runtime = runtime(connection.execute, sql, data, _return_report=True)
                report_info = "%s\nRow Count: %d" % (report_runtime, result.rowcount)
                if data == []:
                    rprint(report_info, sql, title="SQL")
                else:
                    rprint(report_info, sql, data, title="SQL")
            ### Not report.
            else:
                result = connection.execute(sql, data)
    # When RConnection.
    elif self.__class__ == RConnection:
        ## Create Transaction object.
        # NOTE(review): when no data-modifying statement is pending,
        # any prior implicit transaction is rolled back before a new
        # transaction is opened — confirm this is the intended semantics.
        if self.begin_count == 0:
            self.rollback()
            self.begin = connection.begin()
        ## Execute.
        ### Report (same reporting path as the REngine branch).
        if report:
            result, report_runtime = runtime(connection.execute, sql, data, _return_report=True)
            report_info = "%s\nRow Count: %d" % (report_runtime, result.rowcount)
            if data == []:
                rprint(report_info, sql, title="SQL")
            else:
                rprint(report_info, sql, data, title="SQL")
        ### Not report.
        else:
            result = connection.execute(sql, data)
        ## Count data-modifying statements executed in the open transaction.
        syntax = self.get_syntax(sql)
        if syntax in ("UPDATE", "INSERT", "DELETE"):
            self.begin_count += 1
    return result
def execute(
self,
sql: Union[str, TextClause],
data: Optional[Union[List[Dict], Dict, CursorResult, DataFrame]] = None,
report: bool = None,
**kwdata: Any
) -> RResult:
"""
Execute `SQL`.
Parameters
----------
sql : SQL in method `sqlalchemy.text` format, or `TextClause` object.
data : Data set for filling.
report : Whether report SQL execute information.
- `None` : Use `report_execute_info` of module `roption`.
- `int` : Use this value.
kwdata : Keyword data for filling.
Returns
-------
CursorResult object of alsqlchemy package.
"""
# Get parameters by priority.
report = get_first_notnull(report, roption.report_execute_info)
# Handle parameters.
if sql.__class__ == str:
sql = text(sql)
if data is None:
data = [kwdata]
else:
if data.__class__ == dict:
data = [data]
elif isinstance(data, CursorResult):
data = to_table(data)
elif data.__class__ == DataFrame:
data = to_table(data)
else:
data = data.copy()
for param in data:
param.update(kwdata)
# Fill missing data.
data = self.fill_data(data, sql)
# Execute.
## When REngine.
if self.__class__ == REngine:
### Create Connection object.
with self.engine.connect() as connection:
### Can retry.
result = retry(
self.executor,
connection,
sql,
data,
report,
_report="Database execute operational error",
_exception=OperationalError
)
## When RConnection.
elif self.__class__ == RConnection:
### Can retry when not counted.
if self.begin_count == 0:
result = retry(
self.executor,
self.connection,
sql,
data,
report,
_report="Database execute operational error",
_exception=OperationalError
)
### Cannot retry when counted.
else:
result = self.executor(self.connection, sql, data, report)
return result
def execute_select(
self,
table: str,
database: Optional[str] = None,
fields: Optional[Union[str, Iterable]] = None,
where: Optional[str] = None,
group: Optional[str] = None,
having: Optional[str] = None,
order: Optional[str] = None,
limit: Optional[Union[int, str, List, Tuple]] = None,
report: bool = None,
**kwdata: Any
) -> RResult:
"""
Execute `select` SQL.
Parameters
----------
table : Table name.
database : Database name.
fields : Select clause content.
- `None` : Is `SELECT *`.
- `str` : Join as `SELECT str`.
- `Iterable[str]` : Join as `SELECT \`str\`, ...`.
where : Clause `WHERE` content, join as `WHERE str`.
group : Clause `GROUP BY` content, join as `GROUP BY str`.
having : Clause `HAVING` content, join as `HAVING str`.
order : Clause `ORDER BY` content, join as `ORDER BY str`.
limit : Clause `LIMIT` content.
- `Union[int, str]` : Join as `LIMIT int/str`.
- `Union[List, Tuple]` with length of 1 or 2 `int/str` : Join as `LIMIT int/str [, int/str]`.
report : Whether report SQL execute information.
- `None` : Use `report_execute_info` of module `roption`.
- `int` : Use this value.
kwdata : Keyword data for filling.
Returns
-------
CursorResult object of alsqlchemy package.
"""
# Handle parameters.
if database is None:
_database = self.database
else:
_database = database
# Generate SQL.
sqls = []
## Part 'SELECT' syntax.
if fields is None:
fields = "*"
elif fields.__class__ != str:
fields = ",".join(["`%s`" % field for field in fields])
sql_select = f"SELECT {fields}"
sqls.append(sql_select)
## Part 'FROM' syntax.
sql_from = f"FROM `{_database}`.`{table}`"
sqls.append(sql_from)
## Part 'WHERE' syntax.
if where is not None:
sql_where = "WHERE %s" % where
sqls.append(sql_where)
## Part 'GROUP BY' syntax.
if group is not None:
sql_group = "GROUP BY %s" % group
sqls.append(sql_group)
## Part 'GROUP BY' syntax.
if having is not None:
sql_having = "HAVING %s" % having
sqls.append(sql_having)
## Part 'ORDER BY' syntax.
if order is not None:
sql_order = "ORDER BY %s" % order
sqls.append(sql_order)
## Part 'LIMIT' syntax.
if limit is not None:
if limit.__class__ in (str, int):
sql_limit = f"LIMIT {limit}"
else:
if len(limit) in (1, 2):
limit_content = ",".join([str(val) for val in limit])
sql_limit = "LIMIT %s" % limit_content
else:
raise ValueError("The length of the parameter 'limit' value must be 1 or 2")
sqls.append(sql_limit)
sql = "\n".join(sqls)
# Execute SQL.
result = self.execute(sql, report=report, **kwdata)
return result
def execute_update(
self,
data: Union[List[Dict], Dict, CursorResult, DataFrame],
table: str,
database: Optional[str] = None,
where_fields: Optional[Union[str, Iterable[str]]] = None,
report: bool = None
) -> Optional[RResult]:
"""
`Update` the data of table in the datebase.
Parameters
----------
data : Updated data.
table : Table name.
database : Database name.
where_fields : Clause `WHERE` clause content.
- `None` : The first key value pair of each item is judged.
- `str` : This key value pair of each item is judged.
- `Iterable[str]` : Multiple judged, `and` relationship.
report : Whether report SQL execute information.
- `None` : Use `report_execute_info` of module `roption`.
- `int` : Use this value.
Returns
-------
None or CursorResult object.
- `None` : When the data is empty.
- `CursorResult object` : When the data is not empty.
"""
# Handle parameters.
if data.__class__ == dict:
data = [data]
elif isinstance(data, CursorResult):
data = to_table(data)
elif data.__class__ == DataFrame:
data = to_table(data)
if database is None:
_database = self.database
else:
_database = database
# If data is empty, not execute.
if data in ([], [{}]):
return
# Generate SQL.
data_flatten = {}
sqls = []
if where_fields is None:
no_where = True
else:
no_where = False
if where_fields.__class__ == str:
where_fields = [where_fields]
for index, row in enumerate(data):
for key, val in row.items():
index_key = "%d_%s" % (index, key)
data_flatten[index_key] = val
if no_where:
where_fields = [list(row.keys())[0]]
set_content = ",".join(
[
"`%s` = :%d_%s" % (key, index, key)
for key in row
if key not in where_fields
]
)
where_content = "\n AND ".join(
[
f"`{field}` = :{index}_{field}"
for field in where_fields
]
)
sql = (
f"UPDATE `{_database}`.`{table}`\n"
f"SET {set_content}\n"
f"WHERE {where_content}"
)
sqls.append(sql)
sqls = ";\n".join(sqls)
# Execute SQL.
result = self.execute(sqls, data_flatten, report)
return result
def execute_insert(
self,
data: Union[List[Dict], Dict, CursorResult, DataFrame],
table: str,
database: Optional[str] = None,
duplicate_method: Optional[Literal["ignore", "update"]] = None,
report: bool = None
) -> Optional[RResult]:
"""
`Insert` the data of table in the datebase.
Parameters
----------
data : Updated data.
table : Table name.
database : Database name.
duplicate_method : Handle method when constraint error.
- `None` : Not handled.
- `ignore` : Use `UPDATE IGNORE INTO` clause.
- `update` : Use `ON DUPLICATE KEY UPDATE` clause.
report : Whether report SQL execute information.
- `None` : Use `report_execute_info` of module `roption`.
- `int` : Use this value.
Returns
-------
None or CursorResult object.
- `None` : When the data is empty.
- `CursorResult` object : When the data is not empty.
"""
# Handle parameters.
if data.__class__ == dict:
data = [data]
elif isinstance(data, CursorResult):
data = to_table(data)
elif data.__class__ == DataFrame:
data = to_table(data)
if database is None:
_database = self.database
else:
_database = database
# If data is empty, not execute.
if data in ([], [{}]):
return
# Generate SQL.
fields = list({key for row in data for key in row})
fields_str = ",".join(["`%s`" % field for field in fields])
fields_str_position = ",".join([":" + field for field in fields])
if duplicate_method == "ignore":
sql = (
f"INSERT IGNORE INTO `{_database}`.`{table}`({fields_str})\n"
f"VALUES({fields_str_position})"
)
elif duplicate_method == "update":
update_content = ",".join(["`%s` = VALUES(`%s`)" % (field, field) for field in fields])
sql = (
f"INSERT INTO `{_database}`.`{table}`({fields_str})\n"
f"VALUES({fields_str_position})\n"
"ON DUPLICATE KEY UPDATE\n"
f"{update_content}"
)
else:
sql = (
f"INSERT INTO `{_database}`.`{table}`({fields_str})\n"
f"VALUES({fields_str_position})"
)
# Execute SQL.
result = self.execute(sql, data, report)
return result
@overload
def execute_exist(
    self,
    table: str,
    database: Optional[str] = None,
    where: Optional[str] = None,
    count: bool = False,
    report: bool = None,
    **kwdata: Any
) -> Union[bool, int]: ...

@overload
def execute_exist(
    self,
    count: Literal[False]
) -> bool: ...

@overload
def execute_exist(
    self,
    count: Literal[True]
) -> int: ...

def execute_exist(
    self,
    table: str,
    database: Optional[str] = None,
    where: Optional[str] = None,
    count: bool = False,
    report: bool = None,
    **kwdata: Any
) -> Union[bool, int]:
    """
    Judge whether matched records exist, or `count` matched records.

    Parameters
    ----------
    table : Table name.
    database : Database name.
    where : Match condition, `WHERE` clause content, join as `WHERE str`.
        - `None` : Match all.
        - `str` : Match condition.
    count : Whether return match count, otherwise return whether it exist.
    report : Whether report SQL execute information.
        - `None` : Use `report_execute_info` of module `roption`.
        - `int` : Use this value.
    kwdata : Keyword data for filling.

    Returns
    -------
    Whether matched records exist, or the match count.
    """
    # Get parameters: an existence check only needs one row, counting needs all.
    if count:
        limit = None
    else:
        limit = 1
    # Execute: select the constant "1" per matched row.
    result = self.execute_select(table, database, "1", where=where, limit=limit, report=report, **kwdata)
    # Returns.
    rowcount = result.rowcount
    if count:
        return rowcount
    else:
        return rowcount != 0
def get_variables(self, global_: bool = False) -> Dict[str, str]:
    """
    Get `variables` of database as a name -> value mapping.

    Parameters
    ----------
    global_ : Whether to read global variables instead of session variables.

    Returns
    -------
    Dictionary of variable names to values.
    """
    # Choose scope, run the SHOW statement, and fold rows into a dict.
    statement = "SHOW GLOBAL VARIABLES" if global_ else "SHOW VARIABLES"
    cursor = self.execute(statement)
    return cursor.fetch_dict()
def get_status(self, global_: bool = False) -> Dict[str, str]:
    """
    Get `status` of database as a name -> value mapping.

    Parameters
    ----------
    global_ : Whether to read global status instead of session status.

    Returns
    -------
    Dictionary of status names to values.
    """
    # Choose scope, run the SHOW statement, and fold rows into a dict.
    statement = "SHOW GLOBAL STATUS" if global_ else "SHOW STATUS"
    cursor = self.execute(statement)
    return cursor.fetch_dict()
def update_variables(self, params: Dict[str, Union[str, float]], global_: bool = False) -> None:
    """
    Update `variables` of database.

    Parameters
    ----------
    params : Update parameter key value pairs.
    global_ : Whether to update global variables instead of session variables.
    """
    scope = " GLOBAL" if global_ else ""
    statements = []
    for key, val in params.items():
        # Numeric values are emitted bare; everything else is quoted.
        # type() (not isinstance) matches the original check, so bool
        # values are quoted like strings.
        if type(val) in (int, float):
            literal = val
        else:
            literal = "'%s'" % val
        statements.append("SET%s %s = %s" % (scope, key, literal))
    # Execute all SET statements as one multi-statement batch.
    self.execute(";\n".join(statements))
def connect(self):
    """
    Create database `Connection` object.

    Returns
    -------
    RConnection wrapping a fresh connection from this engine.
    """
    # Open a raw connection on the engine and wrap it together with self.
    return RConnection(self.engine.connect(), self)
class RConnection(REngine):
    """
    Rey's database `Connection` type, based on the package `sqlalchemy`.

    Wraps a live sqlalchemy Connection and mirrors the parent engine's
    connection parameters so it can be used like an REngine.
    """

    def __init__(
        self,
        connection: Connection,
        rengine: REngine
    ) -> None:
        """
        Create database `Connection` object and set parameters.

        Parameters
        ----------
        connection : Connection object.
        rengine : REngine object.
        """
        self.connection = connection
        self.rengine = rengine
        # Current transaction; None means no transaction is open.
        self.begin = None
        # Number of executions accumulated in the current transaction.
        self.begin_count = 0
        # Mirror the engine's connection parameters.
        self.drivername = rengine.drivername
        self.username = rengine.username
        self.password = rengine.password
        self.host = rengine.host
        self.port = rengine.port
        self.database = rengine.database
        self.query = rengine.query
        self.pool_recycle = rengine.pool_recycle

    def commit(self) -> None:
        """
        `Commit` cumulative executions.
        """
        # No-op when no transaction is open (PEP 8: use `is not None`).
        if self.begin is not None:
            self.begin.commit()
            self.begin = None
            self.begin_count = 0

    def rollback(self) -> None:
        """
        `Rollback` cumulative executions.
        """
        # No-op when no transaction is open.
        if self.begin is not None:
            self.begin.rollback()
            self.begin = None
            self.begin_count = 0

    def close(self) -> None:
        """
        `Close` database connection.
        """
        self.connection.close()

    def __del__(self) -> None:
        """
        `Close` database connection on garbage collection.
        """
        # __del__ can run on a partially initialized instance (e.g. when
        # __init__ raised before setting `connection`), and exceptions in
        # __del__ are ignored anyway — so guard the cleanup.
        try:
            self.close()
        except Exception:
            pass
from typing import Any, List, Dict, Iterable, Literal, Optional, Union, Type, NoReturn, overload
from os.path import abspath as os_abspath
from pandas import DataFrame, ExcelWriter, isnull
from sqlalchemy.engine.cursor import CursorResult
from .rbase import is_iterable, check_least_one, check_most_one, to_type
from .rdatetime import time2str
def to_table(
    data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]],
    fields: Optional[Iterable] = None
) -> List[Dict]:
    """
    Fetch data to table in `List[Dict]` format; every row dictionary shares
    the same keys in the same order.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    Table in `List[Dict]` format.
    """
    # Rows from a database cursor: zip each row with the column names.
    if isinstance(data, CursorResult):
        keys = data.keys() if fields is None else fields
        return [dict(zip(keys, row)) for row in data]
    # Rows from a DataFrame: NaN-like cells become None.
    if data.__class__ == DataFrame:
        frame = to_df(data, fields)
        keys = frame.columns
        rows = []
        for values in frame.values:
            cleaned = [None if isnull(cell) else cell for cell in values]
            rows.append(dict(zip(keys, cleaned)))
        return rows
    # Anything else: normalize through a DataFrame first.
    return to_table(to_df(data, fields))
def to_dict(
    data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]],
    key_field: Union[int, str] = 0,
    val_field: Union[int, str] = 1
) -> Dict:
    """
    Fetch result as a `dictionary` mapping one field to another.

    Parameters
    ----------
    data : Data.
    key_field : Key field of dictionary.
        - `int` : Subscript index.
        - `str` : Name index.
    val_field : Value field of dictionary.
        - `int` : Subscript index.
        - `str` : Name index.

    Returns
    -------
    Dictionary mapping key field values to value field values.
    (The previous annotation `List[Dict]` was wrong: a dict is returned.)
    """
    # Normalize to a list of row dictionaries.
    data = to_table(data)
    # No rows: nothing to map (the original crashed indexing row 0).
    if not data:
        return {}
    fields = list(data[0].keys())
    # Resolve subscript indexes to field names.
    if key_field.__class__ == int:
        key_field = fields[key_field]
    if val_field.__class__ == int:
        val_field = fields[val_field]
    return {
        row[key_field]: row[val_field]
        for row in data
    }
def to_df(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> DataFrame:
    """
    Fetch data to table of `DataFrame` object.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    DataFrame object.
    """
    # Rows from a database cursor.
    if isinstance(data, CursorResult):
        columns = data.keys() if fields is None else fields
        frame = DataFrame(data, columns=columns)
        return frame.convert_dtypes()
    # Already a DataFrame: only normalize dtypes and optionally rename.
    if data.__class__ == DataFrame:
        frame = data.convert_dtypes()
        if fields is not None:
            frame.columns = fields
        return frame
    # Anything else: let pandas infer the table shape.
    frame = DataFrame(data, columns=fields)
    return frame.convert_dtypes()
def to_json(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to `JSON` string.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    JSON string (one object per row, non-ASCII preserved).
    """
    # Normalize through a DataFrame and serialize row-wise.
    frame = to_df(data, fields)
    return frame.to_json(orient="records", force_ascii=False)
def to_sql(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to `SQL` string (a UNION ALL of SELECT rows, the first row
    aliasing each value with its field name).

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    SQL string.
    """
    # Get fields and row value sequences.
    if isinstance(data, CursorResult):
        if fields is None:
            fields = data.keys()
        rows = data
    else:
        table = to_table(data, fields)
        fields = table[0].keys()
        # Fix: iterate the dictionary VALUES; iterating the dict rows
        # directly yielded the field NAMES instead of the cell values.
        rows = [row.values() for row in table]
    # Generate SQL: datetimes are formatted, None becomes NULL.
    sql_rows_values = [
        [
            repr(time2str(val, "%Y-%m-%d %H:%M:%S"))
            if val is not None
            else "NULL"
            for val in row
        ]
        for row in rows
    ]
    sql_rows = [
        "SELECT " + ",".join(row_values)
        for row_values in sql_rows_values
    ]
    # First row carries the column aliases for the whole UNION.
    sql_row_first = "SELECT " + ",".join(
        [
            "%s AS `%s`" % (val, key)
            for key, val in list(zip(fields, sql_rows_values[0]))
        ]
    )
    sql_rows[0] = sql_row_first
    return " UNION ALL ".join(sql_rows)
def to_html(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to `HTML` string.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    HTML table string (centered, no index column).
    """
    # Normalize through a DataFrame and render as an HTML table.
    frame = to_df(data, fields)
    return frame.to_html(col_space=50, index=False, justify="center")
def to_csv(
    data: Union[CursorResult, DataFrame, Iterable[Dict], Iterable],
    path: str = "data.csv",
    fields: Optional[Iterable] = None
) -> str:
    """
    Fetch data and save it as a `csv` format file.

    Parameters
    ----------
    data : Data.
    path : File save path.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    File absolute path.
    """
    frame = to_df(data, fields)
    absolute_path = os_abspath(path)
    # NOTE(review): mode="a" appends on every call, repeating the header
    # row each time — confirm this append behavior is intended.
    frame.to_csv(absolute_path, mode="a")
    return absolute_path
def to_excel(
    data: Union[CursorResult, DataFrame, Iterable[Dict], Iterable],
    path: str = "data.xlsx",
    group_field: Optional[str] = None,
    sheets_set: Optional[Dict[Union[str, int], Dict[Literal["name", "index", "filter"], Union[str, int, List[str]]]]] = None
) -> str:
    """
    Fetch data to save `excel` format file and return sheet name and sheet data.

    Parameters
    ----------
    data : Data.
    path : File save path.
    group_field : Group field; one sheet is created per group value.
    sheets_set : Set sheet new name and sort sheet and filter sheet fields,
        key is old name or index, value is set parameters.
        - Parameter `name` : Set sheet new name.
        - Parameter `index` : Sort sheet.
        - Parameter `filter` : Filter sheet fields.

    Returns
    -------
    File absolute path.
    """
    # Fix: a mutable default dict ({}) is shared between calls; use a
    # None sentinel instead.
    if sheets_set is None:
        sheets_set = {}
    # Handle parameters.
    if data.__class__ != DataFrame:
        data = to_df(data)
    path = os_abspath(path)
    # Generate sheets: one per group value, or a single default sheet.
    if group_field is None:
        data_group = (("Sheet1", data),)
    else:
        data_group = data.groupby(group_field)
    sheets_table_before = []
    sheets_table_after = []
    for index, sheet_table in enumerate(data_group):
        sheet_name, sheet_df = sheet_table
        if group_field is not None:
            del sheet_df[group_field]
        # Settings can be keyed by the original sheet name or its position.
        if sheet_name in sheets_set:
            sheet_set = sheets_set[sheet_name]
        elif index in sheets_set:
            sheet_set = sheets_set[index]
        else:
            sheets_table_after.append((sheet_name, sheet_df))
            continue
        if "name" in sheet_set:
            sheet_name = sheet_set["name"]
        if "filter" in sheet_set:
            sheet_df = sheet_df[sheet_set["filter"]]
        if "index" in sheet_set:
            sheets_table_before.append((sheet_set["index"], (sheet_name, sheet_df)))
        else:
            sheets_table_after.append((sheet_name, sheet_df))
    # Sheets with an explicit index come first, in index order.
    sheets_table_before.sort(key=lambda item: item[0])
    sheets_table = [sheet_table for _, sheet_table in sheets_table_before] + sheets_table_after
    # Save file.
    excel = ExcelWriter(path)
    for sheet_name, sheet_df in sheets_table:
        sheet_df.to_excel(excel, sheet_name, index=False)
    excel.close()
    return path
def count(
    data: Any,
    count_value: Optional[Dict] = None,
    _surface: bool = True
) -> Dict[Literal["size", "total", "types"], Union[int, Dict[Type, int]]]:
    """
    `Count` data elements recursively: total nodes, leaf count, and a
    per-type histogram.

    Parameters
    ----------
    data : Data.
    count_value : Cumulative count accumulator (used internally by recursion).
    _surface : Whether this is the surface (top-level) recursion call.

    Returns
    -------
    Count data with keys `size` (leaf count), `total` (all nodes) and
    `types` (type histogram, sorted by frequency).
    """
    # Fix: the old mutable default dict was shared AND mutated across
    # calls, so every call kept accumulating into the same counters.
    if count_value is None:
        count_value = {"size": 0, "total": 0, "types": {}}
    # Count this element.
    count_value["total"] += 1
    count_value["types"][data.__class__] = count_value["types"].get(data.__class__, 0) + 1
    # Recurse into containers; everything else is a leaf.
    if data.__class__ == dict:
        for element in data.values():
            count(element, count_value, False)
    elif is_iterable(data):
        for element in data:
            count(element, count_value, False)
    else:
        count_value["size"] = count_value["size"] + 1
    # At the surface call, sort the type histogram by frequency.
    if _surface:
        sorted_key = sorted(count_value["types"], key=lambda key: count_value["types"][key], reverse=True)
        count_value["types"] = {key: count_value["types"][key] for key in sorted_key}
    return count_value
def flatten(data: Any, flattern_data: Optional[List] = None) -> List:
    """
    `Flatten` nested data into a flat list of leaf values.

    Parameters
    ----------
    data : Data.
    flattern_data : Recursion cumulative data (used internally).

    Returns
    -------
    Data after flatten.
    """
    # Fix: the old mutable default list was shared AND mutated across
    # calls, so repeated calls kept appending to one ever-growing list.
    if flattern_data is None:
        flattern_data = []
    # Recurse into dict values.
    if data.__class__ == dict:
        for element in data.values():
            flatten(element, flattern_data)
    # Recurse into other iterables.
    elif is_iterable(data):
        for element in data:
            flatten(element, flattern_data)
    # Leaf value.
    else:
        flattern_data.append(data)
    return flattern_data
# Overloads for `split`: exactly one of `share` / `bin_size` must be given.
# The previous stubs omitted the required `data` parameter entirely.
@overload
def split(data: Iterable, share: int, bin_size: None = None) -> List[List]: ...
@overload
def split(data: Iterable, share: None = None, *, bin_size: int) -> List[List]: ...
@overload
def split(data: Iterable, share: None = None, bin_size: None = None) -> NoReturn: ...
def split(data: Iterable, share: Optional[int] = None, bin_size: Optional[int] = None) -> List[List]:
    """
    `Split` data into multiple bins.

    Parameters
    ----------
    data : Data.
    share : Number of split shares.
    bin_size : Size of each bin.

    Returns
    -------
    List of bins (lists).
    """
    # Exactly one of `share` / `bin_size` must be given.
    check_least_one(share, bin_size)
    check_most_one(share, bin_size)
    # Handle parameters.
    items = list(data)
    total = len(items)
    bins = []
    consumed = 0
    # By number of shares: distribute the remainder evenly across bins.
    if share is not None:
        average = total / share
        for n in range(share):
            size = int(average * (n + 1)) - int(average * n)
            # Fix: the original reused one name (`_data`) for both the
            # accumulator and the slice, so each slice overwrote the
            # result and was appended to itself.
            bins.append(items[consumed:consumed + size])
            consumed += size
    # By size of bin.
    elif bin_size is not None:
        # Fix: loop while data remains; the old `while True` with a
        # post-check appended a trailing empty bin on exact division.
        while consumed < total:
            bins.append(items[consumed:consumed + bin_size])
            consumed += bin_size
    return bins
def unique(data: Iterable) -> List:
    """
    `De duplication` of data, preserving first-seen order.

    Parameters
    ----------
    data : Data.

    Returns
    -------
    List after de duplication.
    """
    # Handle parameters.
    data = to_type(data, tuple)
    # dict preserves insertion order (Python 3.7+), giving O(n) stable
    # de-duplication instead of the O(n^2) set + sort(key=index) approach.
    return list(dict.fromkeys(data))
def ins(obj: Any, *arrays: Iterable) -> bool:
"""
`Judge` whether the object is in multiple array.
Parameters
----------
obj : Judge object.
arrays : Array.
Returns
-------
Judge result.
"""
# Judge.
for array in arrays:
if obj in array:
return True
return False
def mutual_in(*arrays: Iterable) -> bool:
    """
    Whether any element is shared between at least two of the arrays.

    Parameters
    ----------
    arrays : Array.

    Returns
    -------
    Judge result.
    """
    pool = list(arrays)
    # Compare every array only against the arrays after it.
    for position, current in enumerate(pool):
        for later in pool[position + 1:]:
            for element in current:
                if ins(element, later):
                    return True
    return False
from typing import Any, List, Tuple, Literal, Optional, Union
from os import (
walk as os_walk,
listdir as os_listdir
)
from os.path import (
abspath as os_abspath,
join as os_join,
isfile as os_isfile,
isdir as os_isdir
)
from random import randint as random_randint
from .rbase import is_number_str
def digits(number: Union[int, float]) -> Tuple[int, int]:
    """
    `Judge` the number of integer digits and decimal digits.

    Parameters
    ----------
    number : Number to judge.

    Returns
    -------
    Integer digits and decimal digits.
    """
    text = str(number)
    # A decimal point splits the representation into the two digit counts.
    if "." in text:
        whole, fraction = text.split(".")
        return len(whole), len(fraction)
    return len(text), 0
def randn(*thresholds: Union[int, float], precision: Optional[int] = None) -> Union[int, float]:
    """
    `Get` random number.

    Parameters
    ----------
    thresholds : Low and high thresholds of random range, range contains thresholds.
        - When `length is 0`, then low and high thresholds is `0` and `10`.
        - When `length is 1`, then low and high thresholds is `0` and `thresholds[0]`.
        - When `length is 2`, then low and high thresholds is `thresholds[0]` and `thresholds[1]`.
    precision : Precision of random range, that is maximum decimal digits of return value.
        - `None` : Set to Maximum decimal digits of element of parameter `thresholds`.
        - `int` : Set to this value.

    Returns
    -------
    Random number.
    - When parameters `precision` is 0, then return int.
    - When parameters `precision` is greater than 0, then return float.
    """
    # Handle parameters.
    thresholds_len = len(thresholds)
    if thresholds_len == 0:
        threshold_low = 0
        threshold_high = 10
    elif thresholds_len == 1:
        threshold_low = 0
        threshold_high = thresholds[0]
    elif thresholds_len == 2:
        threshold_low = thresholds[0]
        threshold_high = thresholds[1]
    else:
        raise ValueError("number of parameter 'thresholds' must is 0 or 1 or 2")
    if precision is None:
        precision = max(digits(threshold_low)[1], digits(threshold_high)[1])
    # Get random number.
    magnifier = 10 ** precision
    # Fix: scale to exact integers with round(); float rounding error
    # (e.g. 0.57 * 100 == 56.999...) previously made random.randint
    # raise on perfectly valid float thresholds.
    low = round(threshold_low * magnifier)
    high = round(threshold_high * magnifier)
    number = random_randint(low, high) / magnifier
    if precision == 0:
        number = int(number)
    return number
def get_paths(path: Optional[str] = None, target: Literal["all", "file", "folder"] = "all", recursion: bool = True) -> List:
    """
    `Get` the paths of files and folders inside `path`.

    Parameters
    ----------
    path : Root directory; when None, the working directory.
    target : Target data.
        - `Literal['all']` : Return file and folder paths.
        - `Literal['file']` : Return file paths.
        - `Literal['folder']` : Return folder paths.
    recursion : Whether to recurse into subdirectories.

    Returns
    -------
    List of path strings.
    """
    # Handle parameters.
    if path is None:
        path = ""
    path = os_abspath(path)
    paths = []
    # Recursive walk over the whole tree.
    if recursion:
        obj_walk = os_walk(path)
        # Comprehension variables renamed so they no longer shadow `path`.
        if target == "all":
            paths.extend(
                os_join(dir_path, name)
                for dir_path, folder_names, file_names in obj_walk
                for name in file_names + folder_names
            )
        elif target == "file":
            paths.extend(
                os_join(dir_path, name)
                for dir_path, folder_names, file_names in obj_walk
                for name in file_names
            )
        # Fix: the old condition also tested "all", which was unreachable
        # after the first branch.
        elif target == "folder":
            paths.extend(
                os_join(dir_path, name)
                for dir_path, folder_names, file_names in obj_walk
                for name in folder_names
            )
    # Single (non-recursive) directory listing.
    else:
        for name in os_listdir(path):
            target_path = os_join(path, name)
            if (
                target == "all"
                or (target == "file" and os_isfile(target_path))
                or (target == "folder" and os_isdir(target_path))
            ):
                paths.append(target_path)
    return paths
def str2n(string: str) -> Any:
    """
    Try to `convert` a string to a number.

    Parameters
    ----------
    string : String.

    Returns
    -------
    Converted number, or the source string when it is not numeric.
    """
    # Non-numeric strings pass through unchanged.
    if not is_number_str(string):
        return string
    # A decimal point selects float over int.
    return float(string) if "." in string else int(string)
def n2ch(number: int) -> str:
    """
    `Convert` a number to its Chinese numeral representation.

    Parameters
    ----------
    number : Number to convert.

    Returns
    -------
    Chinese numeral string.
    """
    # Import here to avoid a circular module dependency.
    from .rregular import sub_batch
    # Digit characters and positional unit characters (十/百/千/万/亿/兆).
    digit_map = {
        "0": "零",
        "1": "一",
        "2": "二",
        "3": "三",
        "4": "四",
        "5": "五",
        "6": "六",
        "7": "七",
        "8": "八",
        "9": "九",
    }
    unit_map = {
        0: "",
        1: "十",
        2: "百",
        3: "千",
        4: "万",
        5: "十",
        6: "百",
        7: "千",
        8: "亿",
        9: "十",
        10: "百",
        11: "千",
        12: "万",
        13: "十",
        14: "百",
        15: "千",
        16: "兆"
    }
    text = str(number)
    # Replace each Arabic digit with its Chinese digit.
    for arabic, chinese in digit_map.items():
        text = text.replace(arabic, chinese)
    # Interleave positional unit characters, building from the lowest digit.
    pieces = []
    for position, chinese in enumerate(text[::-1]):
        pieces.insert(0, unit_map[position])
        pieces.insert(0, chinese)
    text = "".join(pieces)
    # Collapse redundant zeros and units.
    text = sub_batch(
        text,
        ("(?<=零)[^万亿兆]", ""),
        ("零+", "零"),
        ("零(?=[万亿兆])", "")
    )
    # "一十X" reads as "十X"; a trailing zero is dropped.
    if text[0:2] == "一十":
        text = text[1:]
    if text[-1:] == "零":
        text = text[:-1]
    return text
from typing import Dict, Iterable, Optional, Union
from smtplib import SMTP
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from .rbase import get_first_notnull
class REmail(object):
    """
    Rey's `E-mail` type.
    """

    def __init__(
        self,
        email_username: str,
        email_password: str,
        display_from_email: Optional[str] = None
    ) -> None:
        """
        `Set` E-mail attribute.

        Parameters
        ----------
        email_username : E-mail user name.
        email_password : E-mail password.
        display_from_email : Displayed from E-mail.
            - `None` : Not set.
            - `str` : Set this value.
        """
        self.email_username = email_username
        self.email_password = email_password
        self.display_from_email = display_from_email

    def create_email(
        self,
        text: Optional[str] = None,
        title: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable[str]]] = None,
        display_cc_email: Optional[Union[str, Iterable[str]]] = None
    ) -> str:
        """
        `Create` string in E-mail format.

        Parameters
        ----------
        text : E-mail text.
        title : E-mail title.
        attachment : E-mail attachment.
            - `Dict[str, str]` : File name and path.
            - `Dict[str, bytes]` : File name and stream.
        display_from_email : Displayed from E-mail.
        display_to_email : Displayed to E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        display_cc_email : Displayed cc E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.

        Returns
        -------
        String in E-mail format.
        """
        # Get parameters by priority.
        display_from_email = get_first_notnull(display_from_email, self.display_from_email, self.email_username)
        # Create E-mail object.
        mime = MIMEMultipart()
        if title is not None:
            mime["subject"] = title
        if text is not None:
            mime.attach(MIMEText(text))
        if attachment is not None:
            for file_name, file_data in attachment.items():
                # A str value is a file path; read it into bytes.
                if isinstance(file_data, str):
                    with open(file_data, "rb") as file:
                        file_data = file.read()
                # NOTE(review): MIMEText is documented for str payloads;
                # passing bytes here mirrors the original behavior — confirm.
                mime_file = MIMEText(file_data, _charset="utf-8")
                mime_file.add_header("content-disposition", "attachment", filename=file_name)
                mime.attach(mime_file)
        if display_from_email is not None:
            mime["from"] = display_from_email
        if display_to_email is not None:
            if isinstance(display_to_email, str):
                mime["to"] = display_to_email
            else:
                mime["to"] = ",".join(display_to_email)
        if display_cc_email is not None:
            if isinstance(display_cc_email, str):
                mime["cc"] = display_cc_email
            else:
                mime["cc"] = ",".join(display_cc_email)
        # Create string in E-mail format.
        return mime.as_string()

    def send_email(
        self,
        to_email: Union[str, Iterable[str]],
        text: Optional[str] = None,
        title: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        cc_email: Optional[Union[str, Iterable[str]]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable[str]]] = None,
        display_cc_email: Optional[Union[str, Iterable[str]]] = None
    ) -> None:
        """
        `Send` E-mail.

        Parameters
        ----------
        to_email : To E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        text : E-mail text.
        title : E-mail title.
        attachment : E-mail attachment.
            - `Dict[str, str]` : File name and path.
            - `Dict[str, bytes]` : File name and stream.
        cc_email : Cc E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        display_from_email : Displayed from E-mail.
        display_to_email : Displayed to E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        display_cc_email : Displayed cc E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        """
        # Get parameters by priority.
        display_from_email = get_first_notnull(display_from_email, self.display_from_email, self.email_username)
        display_to_email = get_first_notnull(display_to_email, to_email)
        display_cc_email = get_first_notnull(display_cc_email, cc_email)
        # Handle parameters: always copy into a fresh list so the caller's
        # iterable is neither mutated nor required to support extend().
        if isinstance(to_email, str):
            to_email = [to_email]
        else:
            to_email = list(to_email)
        if cc_email is not None:
            if isinstance(cc_email, str):
                cc_email = [cc_email]
            to_email.extend(cc_email)
        # Create string in E-mail format.
        # Fix: arguments were previously passed positionally as
        # (title, text, ...), swapping the subject and the body against
        # create_email(text, title, ...); pass by keyword instead.
        email_str = self.create_email(
            text=text,
            title=title,
            attachment=attachment,
            display_from_email=display_from_email,
            display_to_email=display_to_email,
            display_cc_email=display_cc_email,
        )
        # Send E-mail via SMTP (port 25) at the user's mail domain.
        server_domain_name = self.email_username.split("@")[-1]
        server_host = "smtp." + server_domain_name
        server_port = 25
        smtp = SMTP(server_host, server_port)
        smtp.login(self.email_username, self.email_password)
        smtp.sendmail(self.email_username, to_email, email_str)
        smtp.quit()
from typing import Any, List, Literal, Optional
from pprint import pformat as pprint_pformat
from urwid import old_str_util
from .rbase import get_first_notnull, get_name
from .rmonkey import pprint_modify_format_width_judgment
from . import roption
# Based on module pprint.pformat, modify the chinese width judgment.
# Applied once at import time so every later pformat call measures wide
# (e.g. CJK) characters by display width when wrapping lines.
pprint_modify_format_width_judgment()
def split_text(text: str, man_len: int, by_width: bool = False) -> List[str]:
    """
    `Split` text into chunks by maximum length or maximum `display width`.

    Parameters
    ----------
    text : Text.
    man_len : Maximum length (or display width) of each chunk.
    by_width : Whether to count length by displayed character width.

    Returns
    -------
    Split text chunks.
    """
    texts = []
    # By display width: accumulate characters until the width would overflow.
    if by_width:
        chunk = []
        chunk_width = 0
        for character in text:
            character_width = get_width(character)
            chunk_width += character_width
            if chunk_width > man_len:
                texts.append("".join(chunk))
                chunk = [character]
                chunk_width = character_width
            else:
                chunk.append(character)
        texts.append("".join(chunk))
    # By character count: plain fixed-size slicing.
    else:
        total = len(text)
        pieces = total // man_len
        if total % man_len:
            pieces += 1
        for piece in range(pieces):
            texts.append(text[man_len * piece:man_len * (piece + 1)])
    return texts
def get_width(text: str) -> int:
    """
    `Get` text `display width` as the sum of per-character widths.

    Parameters
    ----------
    text : Text.

    Returns
    -------
    Text display width.
    """
    # Per-character widths come from urwid's width table, keyed by code point.
    return sum(old_str_util.get_width(ord(character)) for character in text)
def fill_width(text: str, char: str, width: int, align: Literal["left", "right", "center"] = "right") -> str:
    """
    `Fill` text with a character up to a target `display width`.

    Parameters
    ----------
    text : Fill text.
    char : Fill character; must have display width 1.
    width : Fill width.
    align : Align orientation.
        - `Literal['left']` : Pad on the left side.
        - `Literal['right']` : Pad on the right side.
        - `Literal['center']` : Pad both sides.
        NOTE(review): the original docstring described the opposite
        orientation ("left: fill right"), but the code pads on the left
        for 'left' — behavior preserved here; confirm intent.

    Returns
    -------
    Text after fill.

    Raises
    ------
    ValueError : When `char` is not display width 1 or `align` is invalid.
    """
    # Check parameters.
    if get_width(char) != 1:
        raise ValueError("parameter `char` value error")
    # Already wide enough: return unchanged.
    pad = width - get_width(text)
    if pad <= 0:
        return text
    if align == "left":
        return "%s%s" % (char * pad, text)
    if align == "right":
        return "%s%s" % (text, char * pad)
    if align == "center":
        left_pad = int(pad / 2)
        return "%s%s%s" % (char * left_pad, text, char * (pad - left_pad))
    raise ValueError("parameter `align` value error")
def print_frame(
    *contents: Any,
    title: Optional[str],
    width: int,
    frame: Literal["full", "half", "plain"]
) -> None:
    """
    `Print` contents wrapped in a frame.

    Parameters
    ----------
    contents : Print contents.
    title : Print frame title.
        - `None` : No title.
        - `str` : Use this value as the title.
    width : Print frame width.
    frame : Frame type.
        - `Literal['full']` : Build with symbol `═╡╞─║╟╢╔╗╚╝`, and content not can exceed the frame.
            When building fails, fall back to the `half` type.
        - `Literal['half']` : Build with symbol `═╡╞─`, and content can exceed the frame.
        - `Literal['plain']` : Build with symbol `=|-`, and content can exceed the frame.
    """
    # Drop the title when absent or too wide to fit inside the frame.
    if title is None or len(title) > width - 6:
        title = ""
    # Full type: wrap every content line between ║…║ borders.
    if frame == "full":
        if title != "":
            title = f"╡ {title} ╞"
        width_in = width - 2
        _contents = []
        try:
            for content in contents:
                content_str = str(content)
                pieces_str = content_str.split("\n")
                content_str = [
                    "║%s║" % fill_width(line_str, " ", width_in)
                    for piece_str in pieces_str
                    for line_str in split_text(piece_str, width_in, True)
                ]
                content = "\n".join(content_str)
                _contents.append(content)
        # Fix: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception and fall back to the
        # half-frame layout instead.
        except Exception:
            frame_top = fill_width(title, "═", width, "center")
            frame_split = "─" * width
            frame_bottom = "═" * width
            _contents = contents
        else:
            frame_top = "╔%s╗" % fill_width(title, "═", width_in, "center")
            frame_split = "╟%s╢" % ("─" * width_in)
            frame_bottom = "╚%s╝" % ("═" * width_in)
    # Half type.
    elif frame == "half":
        if title != "":
            title = f"╡ {title} ╞"
        frame_top = fill_width(title, "═", width, "center")
        frame_split = "─" * width
        frame_bottom = "═" * width
        _contents = contents
    # Plain type.
    elif frame == "plain":
        if title != "":
            title = f"| {title} |"
        frame_top = fill_width(title, "=", width, "center")
        frame_split = "-" * width
        frame_bottom = "=" * width
        _contents = contents
    # Print, separating contents with the split rule.
    print(frame_top)
    for index, content in enumerate(_contents):
        if index != 0:
            print(frame_split)
        print(content)
    print(frame_bottom)
def rprint(
    *contents: Any,
    title: Optional[str] = None,
    width: Optional[int] = None,
    frame: Optional[Literal["full", "half", "plain"]] = "full",
    format: bool = True
) -> None:
    """
    `Print` formatted contents inside a frame.

    Parameters
    ----------
    contents : Print contents.
    title : Print frame title.
        - `None` : No title.
        - `str` : Use this value as the title.
    width : Print frame width.
        - `None` : Use `print_width` of module `roption`.
        - `int` : Use this value.
    frame : Frame type.
        - `Literal['full']` : Use `print_frame_full` of module `roption`.
        - `Literal['half']` : Use `print_frame_half` of module `roption`.
        - `Literal['plain']` : Use `print_frame_plain` of module `roption`.
    format : Whether format data of type list or tuple or dict or set.
    """
    # Get parameters by priority.
    width = get_first_notnull(width, roption.print_width)
    # Derive a title from the callers' argument names when possible.
    if title is None:
        titles = get_name(contents)
        if titles is not None:
            titles = [name if name[:1] != "`" else "" for name in titles]
            if set(titles) != {""}:
                title = " │ ".join(titles)
    # Resolve the frame style against module options.
    if frame == "full":
        frame = roption.print_frame_full
    elif frame == "half":
        frame = roption.print_frame_half
    elif frame == "plain":
        frame = roption.print_frame_plain
    # Pretty-format container contents to fit inside the frame.
    if format:
        inner_width = width - 2 if frame == "full" else width
        contents = [
            pprint_pformat(content, width=inner_width, sort_dicts=False)
            if content.__class__ in (list, tuple, dict, set)
            else content
            for content in contents
        ]
    # Print.
    print_frame(*contents, title=title, width=width, frame=frame)
from typing import List, Tuple, Literal, Optional, Union
import os
import re
import time
import random
from traceback import format_exc
from zipfile import ZipFile, is_zipfile
from .rbasic import error
from . import roption
from .rtext import rprint
def exc(title: str = "Error", to_print: bool = True) -> str:
    """
    Print and return error messages; must be used inside an 'except' block.

    Parameters
    ----------
    title : Print title.
    to_print : Whether to print the error messages.

    Returns
    -------
    Error messages.
    """
    # Local renamed so it no longer shadows the imported `error` helper.
    message = format_exc().strip()
    if to_print:
        rprint(message, title=title, frame=roption.print_default_frame_half)
    return message
def digits(number: Union[int, float]) -> Tuple[int, int]:
    """
    Judge the number of integer digits and decimal digits.

    Parameters
    ----------
    number : Number to judge.

    Returns
    -------
    Integer digits and decimal digits.
    """
    # Without a decimal point `fraction` is empty, giving 0 decimal digits.
    whole, _point, fraction = str(number).partition(".")
    return len(whole), len(fraction)
def randn(*thresholds: Union[int, float], precision: Optional[int] = None) -> Union[int, float]:
    """
    Get random number.

    Parameters
    ----------
    thresholds : Low and high thresholds of random range, range contains thresholds.
        - When length is 0, then low and high thresholds is 0 and 10.
        - When length is 1, then low and high thresholds is 0 and thresholds[0].
        - When length is 2, then low and high thresholds is thresholds[0] and thresholds[1].
    precision : Precision of random range, that is maximum decimal digits of return value.
        - None : Set to Maximum decimal digits of element of parameter *thresholds.
        - int : Set to this value.

    Returns
    -------
    Random number.
    - When parameters precision is 0, then return int.
    - When parameters precision is greater than 0, then return float.
    """
    thresholds_len = len(thresholds)
    if thresholds_len == 0:
        threshold_low = 0
        threshold_high = 10
    elif thresholds_len == 1:
        threshold_low = 0
        threshold_high = thresholds[0]
    elif thresholds_len == 2:
        threshold_low = thresholds[0]
        threshold_high = thresholds[1]
    else:
        # NOTE(review): `error` is presumably expected to raise here —
        # confirm against rbasic.error; the original call is kept.
        error("number of parameter '*thresholds' must is 0 or 1 or 2", ValueError)
    if precision is None:
        precision = max(digits(threshold_low)[1], digits(threshold_high)[1])
    magnifier = 10 ** precision
    # Fix: scale to exact integers with round(); float rounding error
    # (e.g. 0.57 * 100 == 56.999...) previously made random.randint
    # raise on perfectly valid float thresholds.
    low = round(threshold_low * magnifier)
    high = round(threshold_high * magnifier)
    number = random.randint(low, high) / magnifier
    if precision == 0:
        number = int(number)
    return number
def sleep(*thresholds: Union[int, float], precision: Optional[int] = None) -> Union[int, float]:
    """
    Sleep random seconds.

    Parameters
    ----------
    thresholds : Low and high thresholds of random range, range contains thresholds.
        - When length is 0, then low and high thresholds is 0 and 10.
        - When length is 1, then sleep this value.
        - When length is 2, then low and high thresholds is thresholds[0] and thresholds[1].
    precision : Precision of random range, that is maximum decimal digits of sleep seconds.
        - None : Set to maximum decimal digits of element of parameter *thresholds.
        - int : Set to this value.

    Returns
    -------
    Seconds actually slept.
    """
    count = len(thresholds)
    if count > 2:
        error("number of parameter '*thresholds' must is 0 or 1 or 2", ValueError)
    if count == 1:
        # A single value is used verbatim, no randomization.
        second = thresholds[0]
    else:
        low, high = (0, 10) if count == 0 else thresholds
        second = randn(low, high, precision=precision)
    time.sleep(second)
    return second
def get_paths(path: Optional[str] = None, target: Literal["all", "file", "folder"] = "all", recursion: bool = True) -> List:
    """
    Get the path of files and folders in the path.

    Parameters
    ----------
    path : Root directory.
        - None : Work path.
        - str : Use this value.
    target : Target data.
        - "all" : Return file and folder path.
        - "file" : Return file path.
        - "folder" : Return folder path.
    recursion : Whether recurse into subdirectories.

    Returns
    -------
    List of absolute paths.
    """
    # Default to working directory (abspath of "" is the work path).
    if path is None:
        path = ""
    path = os.path.abspath(path)
    paths = []
    if recursion:
        obj_walk = os.walk(path)
        if target == "all":
            paths.extend(
                os.path.join(dir_path, name)
                for dir_path, folders_name, files_name in obj_walk
                for name in files_name + folders_name
            )
        elif target == "file":
            paths.extend(
                os.path.join(dir_path, file_name)
                for dir_path, folders_name, files_name in obj_walk
                for file_name in files_name
            )
        # Fixed: original tested `target in ["all", "folder"]` here, but the
        # "all" case was already consumed above, so only "folder" is reachable.
        elif target == "folder":
            paths.extend(
                os.path.join(dir_path, folder_name)
                for dir_path, folders_name, files_name in obj_walk
                for folder_name in folders_name
            )
    else:
        for name in os.listdir(path):
            target_path = os.path.join(path, name)
            if target == "all":
                paths.append(target_path)
            elif target == "file" and os.path.isfile(target_path):
                paths.append(target_path)
            elif target == "folder" and os.path.isdir(target_path):
                paths.append(target_path)
    return paths
# Mapping from Arabic digit characters to Chinese numeral characters.
map_digit = {
    "0": "零",
    "1": "一",
    "2": "二",
    "3": "三",
    "4": "四",
    "5": "五",
    "6": "六",
    "7": "七",
    "8": "八",
    "9": "九",
}
# Mapping from digit position (counted from the units place) to the Chinese
# unit character; supports at most 17 integer digits (up to "兆").
map_digits = {
    0: "",
    1: "十",
    2: "百",
    3: "千",
    4: "万",
    5: "十",
    6: "百",
    7: "千",
    8: "亿",
    9: "十",
    10: "百",
    11: "千",
    12: "万",
    13: "十",
    14: "百",
    15: "千",
    16: "兆"
}
def n_to_ch(number: int) -> str:
    """
    Convert number to chinese number.

    Parameters
    ----------
    number : Number to convert (at most 17 digits, limited by map_digits).

    Returns
    -------
    Chinese number.
    """
    # Processing parameters.
    number = str(number)
    # Replace each Arabic digit with its Chinese character.
    for digit, digit_ch in map_digit.items():
        number = number.replace(digit, digit_ch)
    # Interleave unit characters, building from the least significant digit.
    number_list = []
    for index, digit_ch in enumerate(number[::-1]):
        digits_ch = map_digits[index]
        number_list.insert(0, digits_ch)
        number_list.insert(0, digit_ch)
    number = "".join(number_list)
    # Delete redundant content: units directly after a zero, runs of zeros,
    # and zeros before grouping units.
    pattern = "(?<=零)[^万亿兆]"
    number = re.sub(pattern, "", number)
    pattern = "零+"
    number = re.sub(pattern, "零", number)
    pattern = "零(?=[万亿兆])"
    number = re.sub(pattern, "", number)
    if number[0:2] == "一十":
        number = number[1:]
    if number[-1:] == "零":
        number = number[:-1]
    # Fix: input 0 collapsed to an empty string after the trailing-零 strip.
    if number == "":
        number = "零"
    return number
from typing import List, Dict, Tuple, Literal, Optional, Union
import requests
from requests import Response, JSONDecodeError
from faker import Faker
from .rbasic import error, get_first_notnull
from . import roption
# Global Faker instance (zh_CN locale) used to generate fake request header
# values such as random User-Agent strings.
fake: Faker = Faker("zh_CN")
def fake_headers() -> Dict:
    """
    Fake request headers.

    Returns
    -------
    Fake request headers with a randomized User-Agent value.
    """
    headers = {}
    # Fixed: 'user_agent' is not a valid HTTP header field name and would be
    # sent literally (and ignored by servers); use the standard 'User-Agent'.
    headers["User-Agent"] = fake.android_platform_token()
    return headers
def check_response(
    response: Response,
    code_fields: Optional[List] = None,
    success_codes: Optional[List] = None,
    throw_error: bool = True
) -> Tuple[int, str]:
    """
    Check whether a response is successful.

    Parameters
    ----------
    response : Object from requests package.
    code_fields : Possible field names of response code in response data.
        - None : Use option of module roption.
        - List : Use this value.
    success_codes : Successful codes.
        - None : Use option of module roption.
        - List : Use this value.
    throw_error : Whether throw error on failure.

    Returns
    -------
    Response code and response message.
    """
    code_fields = get_first_notnull(code_fields, roption.code_fields)
    success_codes = get_first_notnull(success_codes, roption.success_codes)
    # HTTP status check first.
    status = response.status_code
    if status not in success_codes:
        result = status, response.text
        if throw_error:
            error(result)
        return result
    # Non-JSON bodies are treated as successful.
    try:
        payload = response.json()
    except JSONDecodeError:
        return 200, "success"
    if type(payload) == dict:
        # The first code field present in the payload decides the outcome.
        for field in code_fields:
            if field not in payload:
                continue
            code = payload[field]
            if code in success_codes:
                break
            result = code, payload
            if throw_error:
                error(result)
            return result
    return 200, "success"
def request(
    url: str,
    data: Optional[Dict] = None,
    json: Optional[Dict] = None,
    headers: Optional[Union[Dict, Literal["fake"]]] = None,
    timeout: Optional[Union[int, float]] = None,
    proxies: Optional[Dict[str, str]] = None,
    method: Optional[Literal["get", "post"]] = None,
    check: bool = False,
    code_fields: Optional[List] = None,
    success_codes: Optional[List] = None
) -> Response:
    """
    Send HTTP request.

    Parameters
    ----------
    url : Request URL.
    data : Request data. Parameter data and json conflict.
    json : Request data in JSON format. Parameter data and json conflict.
    headers : Request header.
        - None : No request header.
        - Dict : Use dict as request header.
        - Literal['fake'] : Use fake request header.
    timeout : Request maximum waiting time.
        - None : No limit.
        - int, float : Use this value.
    proxies : IP proxy setup.
        - None : No setup.
        - Dict[str, str] : Name and use IP of each protocol.
    method : Request method.
        - None : 'get' when neither data nor json is given, otherwise 'post'.
        - Literal['get'] : Request method is get.
        - Literal['post'] : Request method is post.
    check : Whether check response.
    code_fields : Possible field names of response code in response data.
        - None : Use option of module roption.
        - List : Use this value.
    success_codes : Successful codes.
        - None : Use option of module roption.
        - List : Use this value.

    Returns
    -------
    Response object of requests package.
    """
    code_fields = get_first_notnull(code_fields, roption.code_fields)
    success_codes = get_first_notnull(success_codes, roption.success_codes)
    # Infer the method from the presence of a request body.
    if method is None:
        method = "get" if data is None and json is None else "post"
    if headers == "fake":
        headers = fake_headers()
    if method == "get":
        response = requests.get(url, data=data, json=json, headers=headers, timeout=timeout, proxies=proxies)
    elif method == "post":
        response = requests.post(url, data=data, json=json, headers=headers, timeout=timeout, proxies=proxies)
    if check:
        check_response(response, code_fields, success_codes)
    return response
from typing import List, Union, Optional
from zipfile import ZipFile, is_zipfile, ZIP_DEFLATED
import os
from .rbasic import error
def compress(obj_path: str, build_dir: Optional[str] = None, overwrite: bool = True) -> None:
    """
    Compress file or folder into a ZIP archive.

    Parameters
    ----------
    obj_path : File or folder path.
    build_dir : Build directory.
        - None : Work directory.
        - str : Use this value.
    overwrite : Whether to overwrite an existing archive.
    """
    # Processing parameters.
    if build_dir is None:
        build_dir = os.getcwd()
    mode = "w" if overwrite else "x"
    # Generate build path.
    basename = os.path.basename(obj_path)
    build_name = os.path.splitext(basename)[0] + ".zip"
    build_path = os.path.join(build_dir, build_name)
    # Fixed: entries were written with their full (drive-relative) path
    # chains embedded in the archive; store names relative to the object's
    # parent directory instead.
    abs_obj_path = os.path.abspath(obj_path)
    root_dir = os.path.dirname(abs_obj_path)
    with ZipFile(build_path, mode, ZIP_DEFLATED) as zip_file:
        zip_file.write(abs_obj_path, os.path.relpath(abs_obj_path, root_dir))
        # Recursive compress for directories.
        if os.path.isdir(abs_obj_path):
            for folder_path, sub_folders_name, files_name in os.walk(abs_obj_path):
                for name in sub_folders_name + files_name:
                    member_path = os.path.join(folder_path, name)
                    arcname = os.path.relpath(member_path, root_dir)
                    zip_file.write(member_path, arcname)
def decompress(obj_path: str, build_dir: Optional[str] = None, password: Optional[str] = None) -> None:
    """
    Decompress compressed object.

    Parameters
    ----------
    obj_path : Compressed object path.
    build_dir : Build directory.
        - None : Work directory.
        - str : Use this value.
    password : Unzip password.
        - None : No unzip password.
        - str : Use this value.
    """
    # Check whether object can be decompressed.
    is_support = is_zipfile(obj_path)
    if not is_support:
        error("file format that cannot be decompressed")
    # Processing parameters.
    if build_dir is None:
        build_dir = os.getcwd()
    # Fixed: ZipFile expects the password as bytes; passing the str parameter
    # through raised TypeError even for unencrypted archives.
    pwd = password.encode() if isinstance(password, str) else password
    # Decompress.
    with ZipFile(obj_path) as zip_file:
        zip_file.extractall(build_dir, pwd=pwd)
def rzip(obj_path: str, build_dir: Optional[str] = None) -> None:
    """
    Automatically judge the object type, then compress or decompress it.

    Parameters
    ----------
    obj_path : File or folder or compressed object path.
    build_dir : Build directory.
        - None : Work directory.
        - str : Use this value.
    """
    # ZIP archives are extracted; everything else is compressed.
    action = decompress if is_zipfile(obj_path) else compress
    action(obj_path, build_dir)
from typing import Callable, Any, Generator, Optional
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor, as_completed
from .rwrap import update_tqdm
def threads(
    func: Callable,
    *args: Any,
    max_workers: Optional[int] = None,
    thread_name: Optional[str] = None,
    timeout: Optional[int] = None,
    to_tqdm: bool = True,
    **kwargs: Any
) -> Generator:
    """
    Concurrent multi tasks using thread pool.

    Parameters
    ----------
    func : Task function.
    args : Position parameter of input parameter task function.
    max_workers: Maximum number of threads.
        - None : Number of CPU + 4, 32 maximum.
        - int : Use this value, no maximum limit.
    thread_name: Thread name prefix and progress bar description.
        - None : Thread name prefix is 'ThreadPoolExecutor-%d' % index, and no progress bar.
        - str : Use this value.
    timeout : Call generator maximum waiting second, overdatetime throw error.
        - None : Unlimited.
        - int : Use this value.
    to_tqdm : Whether print progress bar.
    kwargs : Keyword parameter of input parameter task function.

    Returns
    -------
    Generator with multi Future object, object from concurrent package.
    When called, it will block until all tasks are completed.
    When 'for' syntax it, the task that complete first return first.

    Examples
    --------
    Get value.

    >>> results = [future.result() for future in Generator]
    """
    # Default the thread name (and progress bar description) to the task name.
    if thread_name == None:
        thread_name = func.__name__
    # Lengths of the positional parameter sequences; length-1 sequences are
    # treated as broadcastable and excluded from the common task count.
    # NOTE(review): min() raises ValueError when no positional args are given
    # or all of them have length 1 — confirm callers always pass at least one
    # longer sequence.
    params_lens = {len(param) for param in args}
    params_lens -= {1}
    min_param_len = min(params_lens)
    # Broadcast length-1 positional sequences up to the common task count.
    args = [
        list(param) * min_param_len
        if len(param) == 1
        else param
        for param in args
    ]
    # Build per-task [key, value] pairs for keyword parameters.
    # NOTE(review): in the length-1 branch the whole 1-element iterable `val`
    # (not its single element) is used as the value — looks inconsistent with
    # the non-broadcast branch; confirm intended.
    kwargs = [
        [[key, val]] * min_param_len
        if len(val) == 1
        else [
            [key, param]
            for param in val
        ]
        for key, val in kwargs.items()
    ]
    # Transpose parameter sequences into one (args, kwargs) pair per task.
    if args:
        args = zip(*args)
    else:
        args = [[]] * min_param_len
    if kwargs:
        kwargs = zip(*kwargs)
        kwargs = [dict(param) for param in kwargs]
    else:
        kwargs = [{}] * min_param_len
    params = zip(args, kwargs)
    thread_pool = ThreadPoolExecutor(max_workers, thread_name)
    # Wrap the task so each completion advances the shared progress bar.
    if to_tqdm:
        tqdm_desc = "ThreadPool " + thread_name
        obj_tqdm = tqdm(desc=tqdm_desc, total=min_param_len)
        func = update_tqdm(func, obj_tqdm, _execute=False)
    # Submit all tasks; as_completed yields futures in completion order.
    tasks = [thread_pool.submit(func, *args, **kwargs) for args, kwargs in params]
    obj_tasks = as_completed(tasks, timeout)
    return obj_tasks
from typing import Any, Tuple, Dict, Literal, Optional, Union
import time
import datetime
from pandas import DataFrame, concat as pd_concat
from .rbasic import error, check_param, is_number_str
from .rregular import re_search
from .rtext import rprint
def now(
    format: Literal["datetime", "date", "time", "datetime_str", "date_str", "time_str", "timestamp"] = "datetime_str"
) -> Union[str, int, datetime.datetime, datetime.date, datetime.time]:
    """
    Get current time as string, integer or object of datetime package.

    Parameters
    ----------
    format : Format type.
        - Literal['datetime'] : Return datetime object of datetime package.
        - Literal['date'] : Return date object of datetime package.
        - Literal['time'] : Return time object of datetime package.
        - Literal['datetime_str'] : Return string in format '%Y-%m-%d %H:%M:%S'.
        - Literal['date_str'] : Return string in format '%Y-%m-%d'.
        - Literal['time_str'] : Return string in format '%H:%M:%S'.
        - Literal['timestamp'] : Return timestamp in milliseconds.

    Returns
    -------
    Time string or integer or object of datetime package.
    """
    if format == "timestamp":
        return int(time.time() * 1000)
    current = datetime.datetime.now()
    if format == "datetime":
        return current
    if format == "date":
        return current.date()
    if format == "time":
        return current.time()
    string_formats = {
        "datetime_str": "%Y-%m-%d %H:%M:%S",
        "date_str": "%Y-%m-%d",
        "time_str": "%H:%M:%S",
    }
    if format in string_formats:
        return current.strftime(string_formats[format])
def time_to_str(
    time_obj: Union[datetime.datetime, datetime.date, datetime.time, datetime.timedelta],
    format_str: Optional[str] = None,
    throw_error: bool = False
) -> Union[str, Any]:
    """
    Format time object of datetime package as string.

    Parameters
    ----------
    time_obj : Object of datetime package.
    format_str : Format string.
        - None : Automatic by type.
            * Parameter time_obj is datetime.datetime : Is '%Y-%m-%d %H:%M:%S'.
            * Parameter time_obj is datetime.date : Is '%Y-%m-%d'.
            * Parameter time_obj is datetime.time : Is '%H:%M:%S'.
            * Parameter time_obj is datetime.timedelta : Is f'{days} %H:%M:%S'.
        - str : Format by str.
    throw_error : Whether throw error when parameter time_obj type is wrong,
        otherwise return the original value.

    Returns
    -------
    String after format or original value.
    """
    # Convert to time string.
    obj_type = type(time_obj)
    if obj_type == datetime.datetime:
        if format_str == None:
            # Default format equals the first 19 characters of str().
            string = str(time_obj)[:19]
        else:
            string = time_obj.strftime(format_str)
    elif obj_type == datetime.date:
        if format_str == None:
            string = str(time_obj)[:10]
        else:
            string = time_obj.strftime(format_str)
    elif obj_type == datetime.time:
        if format_str == None:
            string = str(time_obj)[:8]
        else:
            string = time_obj.strftime(format_str)
    elif obj_type == datetime.timedelta:
        if format_str == None:
            # str(timedelta) looks like '2 days, 3:04:05' or '3:04:05'.
            string = str(time_obj)
            if "day" in string:
                day, char, string = string.split(" ")
            else:
                day = "0"
            # Zero-pad a single-digit hour so the output is always 'HH:MM:SS'.
            if string[1] == ":":
                string = "0" + string
            string = "%s %s" % (day, string[:8])
        else:
            # NOTE(review): time.gmtime only receives .seconds, so whole days
            # are dropped when a custom format is given — confirm intended.
            seconds = time_obj.seconds
            time_obj = time.gmtime(seconds)
            string = time.strftime(format_str, time_obj)
    else:
        # Unsupported type: optionally raise via check_param, else pass through.
        if throw_error:
            check_param(time_obj, datetime.datetime, datetime.date, datetime.time, datetime.timedelta)
        return time_obj
    return string
def str_to_time(
string: str,
to_type: Optional[Literal["datetime", "date", "time", "timedelta", "timestamp"]] = None,
format_str: Optional[str] = None,
throw_error: bool = False
) -> Union[datetime.datetime, datetime.date, datetime.time, datetime.timedelta, int, Any]:
"""
Format string as time object of datetime package
Parameters
----------
string : Time string.
to_type : Format type.
- None : Automatic judgment.
- Literal['datetime'] : Return datetime object of datetime package.
- Literal['date'] : Return date object of datetime package.
- Literal['time'] : Return time object of datetime package.
- Literal['timedelta'] : Return timedelta object of datetime package.
- Literal['timestamp'] : Return timestamp in milliseconds.
format_str : Format string.
- None : Default format method.
* Parameter to_type is 'datetime' : Is '%Y-%m-%d %H:%M:%S'.
* Parameter to_type is 'date' : Is '%Y-%m-%d'.
* Parameter to_type is 'time' : Is '%H:%M:%S'.
* Parameter to_type is 'timedelta' : Is 'days %H:%M:%S'.
* Parameter to_type is 'timestamp' : Is '%Y-%m-%d %H:%M:%S'.
* Parameter to_type is None : automatic judgment.
- str : Format by str.
throw_error : Whether throw error, when parameter time_obj value error, otherwise return original value.
Returns
-------
Time object of datetime package or timestamp or original value.
"""
# Get time format by automatic judgment.
if to_type == None:
str_len = len(string)
if "年" == string[4:5]:
if str_len > 11:
format_str = "%Y年%m月%d日 %H时%M分%S秒"
to_type = "datetime"
else:
format_str = "%Y年%m月%d日"
to_type = "date"
elif "时" in string[1:3]:
format_str = "%H时%M分%S秒"
to_type = "time"
elif " " in string and "-" not in string:
format_str = "%H:%M:%S"
to_type = "timedelta"
elif str_len == 19:
format_str = "%Y-%m-%d %H:%M:%S"
to_type = "datetime"
elif str_len == 14:
format_str = "%Y%m%d%H%M%S"
to_type = "datetime"
elif str_len == 10:
format_str = "%Y-%m-%d"
to_type = "date"
elif str_len == 8:
if string[2] == ":":
format_str = "%H:%M:%S"
to_type = "time"
else:
format_str = "%Y%m%d"
to_type = "date"
elif str_len == 6:
format_str = "%H%M%S"
to_type = "time"
elif str_len == 4:
format_str = "%Y"
to_type = "date"
else:
return string
# Get time format by parameter to_type.
else:
if format_str == None:
format_dir = {
"datetime": "%Y-%m-%d %H:%M:%S",
"date": "%Y-%m-%d",
"time": "%H:%M:%S",
"timestamp": "%Y-%m-%d %H:%M:%S",
"timedelta": "%H:%M:%S"
}
format_str = format_dir[to_type]
# Additional processing timedelta type.
if to_type == "timedelta":
if " " in string:
strings = string.split(" ")
day_str, string = strings[0], strings[-1]
else:
day = "0"
try:
day = int(day_str)
except ValueError:
if throw_error:
error("failed to format string as time object")
return string
# Convert to time type.
try:
time_obj = datetime.datetime.strptime(string, format_str)
except ValueError:
if throw_error:
error("failed to format string as time object")
return string
if to_type == "date":
time_obj = time_obj.date()
elif to_type == "time":
time_obj = time_obj.time()
elif to_type == "timestamp":
time_obj = int(time_obj.timestamp() * 1000)
elif to_type == "timedelta":
second = time_obj.second
second += day * 86400
time_obj = datetime.timedelta(seconds=second)
return time_obj
def is_sql_time(content: Union[str, int], return_value: bool = False) -> Union[bool, Tuple[int, int, int, int, int, int]]:
    """
    Judge whether it conforms to SQL time format.

    Parameters
    ----------
    content : Judge object (time string or packed integer).
    return_value : Whether return the parsed values instead of a bool.

    Returns
    -------
    Judgment result or tuple (year, month, day, hour, minute, second).
    """
    content_type = type(content)
    if content_type == str:
        content_len = len(content)
        if content_len < 5:
            return False
        if is_number_str(content[4]):
            # Purely numeric string: either exactly 'YYYYMMDD' or a packed
            # 'YY(YY)MMDD[HH[MM[SS]]]' form parsed by regex groups.
            # NOTE(review): patterns are not raw strings; '\d' works but
            # emits warnings on newer Python — consider r-strings.
            if content_len == 8:
                datetimes_str = [content[0:4], content[4:6], content[6:8], None, None, None]
            else:
                pattern = "^(\d{2}|\d{4})(\d{2})(\d{1,2})(\d{0,2})(\d{0,2})(\d{0,2})$"
                result = re_search(pattern, content)
                datetimes_str = list(result)
        else:
            # Delimited string: any non-word character may separate fields.
            pattern = "^(\d{2}|\d{4})[\W_](\d{2})[\W_](\d{2})[\W_]?(\d{2})?[\W_]?(\d{2})?[\W_]?(\d{2})?$"
            result = re_search(pattern, content)
            datetimes_str = list(result)
    elif content_type == int:
        content = str(content)
        content_len = len(content)
        if content_len < 3:
            return False
        elif content_len <= 8:
            # Short integer: date only (year may be partial or absent).
            pattern = r"^(\d{0,4}?)(\d{1,2}?)(\d{2})$"
            result = re_search(pattern, content)
            datetimes_str = list(result)
            datetimes_str += [None, None, None]
        else:
            # Long integer: date and time packed together.
            pattern = r"^(\d{0,4}?)(\d{1,2})(\d{2})(\d{2})(\d{2})(\d{2})$"
            result = re_search(pattern, content)
            datetimes_str = list(result)
        # Pad a partial year with the leading digits of '2000'.
        year_len = len(datetimes_str[0])
        datetimes_str[0] = "2000"[0:4-year_len] + datetimes_str[0]
    try:
        # Missing fields default to 0; datetime() validates the combination.
        year, month, day, hour, minute, second = [
            0 if int_str in ["", None] else int(int_str)
            for int_str in datetimes_str
        ]
        datetime.datetime(year, month, day, hour, minute, second)
        if return_value:
            return year, month, day, hour, minute, second
        return True
    except ValueError:
        pass
    return False
class rdatetimeMark():
    """
    Rey's time mark type: records successive time marks and the intervals
    between them.
    """

    def __init__(self) -> None:
        """
        Initialize the record list and mark now time.
        """
        # All mark information dicts, in marking order.
        self.record = []
        self.mark()

    def mark(self) -> Dict[
        Literal["index", "timestamp", "datetime", "datetime_str", "interval_timestamp", "interval_timedelta", "interval_timedelta_str"],
        Optional[Union[str, float, datetime.datetime, datetime.timedelta]]
    ]:
        """
        Mark now time and return mark time information.

        Returns
        -------
        Mark time information.
        """
        # Self-heal for instances created without __init__ (original behavior
        # relied on a __dir__ check inside mark).
        if not hasattr(self, "record"):
            self.record = []
        record_len = len(self.record)
        mark_info = {
            "index": record_len,
            "timestamp": now("timestamp"),
            "datetime": now("datetime"),
            "datetime_str": now(),
        }
        if record_len == 0:
            # The first mark has no predecessor, so no intervals.
            mark_info["interval_timestamp"] = None
            mark_info["interval_timedelta"] = None
            mark_info["interval_timedelta_str"] = None
        else:
            last_record = self.record[-1]
            mark_info["interval_timestamp"] = mark_info["timestamp"] - last_record["timestamp"]
            mark_info["interval_timedelta"] = mark_info["datetime"] - last_record["datetime"]
            mark_info["interval_timedelta_str"] = time_to_str(mark_info["interval_timedelta"])
        self.record.append(mark_info)
        return mark_info

    def report(self) -> DataFrame:
        """
        Print and return mark time information.

        Returns
        -------
        DataFrame object of pandas package with mark time information.
        """
        data = [
            {
                "timestamp": row["timestamp"],
                "datetime": row["datetime_str"],
                "interval": row["interval_timedelta_str"]
            }
            for row in self.record
        ]
        report_df = DataFrame(data)
        # Total elapsed time between first and last mark, appended as a row.
        interval_timedelta = self.record[-1]["datetime"] - self.record[0]["datetime"]
        interval = time_to_str(interval_timedelta)
        sum_df = DataFrame({"interval": interval}, index=["sum"])
        report_df = pd_concat([report_df, sum_df])
        report_df.fillna("-", inplace=True)
        title = "Time Mark"
        rprint(report_df, title=title)
        return report_df
from typing import Any, List, Tuple, Iterable, Callable, NoReturn, Type, Literal, Optional, Union
from warnings import warn as warnings_warn
from varname import argname
def error(error_info: Optional[Any] = None, error_type: Type[BaseException] = AssertionError) -> NoReturn:
    """
    Throw error.

    Parameters
    ----------
    error_info : Error information.
        - None : Raise the error type without a message.
        - Any : Raise with this information.
    error_type : Error type.

    Raises
    ------
    error_type : Always.
    """
    # 'is None' avoids invoking a custom __eq__ on error_info.
    if error_info is None:
        raise error_type
    raise error_type(error_info)
def warn(*warn_infos: Any, warn_type: Type[BaseException] = UserWarning, stacklevel: int = 3) -> None:
    """
    Throw warning.

    Parameters
    ----------
    warn_infos : Warn informations; joined into one warning message.
    warn_type : Warn type.
    stacklevel : Warning code location, number of recursions up the code level.
    """
    count = len(warn_infos)
    if count == 0:
        # No information given: emit a generic warning.
        message = "Warning!"
    elif count == 1:
        only = warn_infos[0]
        message = only if type(only) == str else str(only)
    else:
        message = str(warn_infos)
    warnings_warn(message, warn_type, stacklevel)
def check_param(value: Any, *targets: Union[Any, Literal["_iterable"]], check_element: bool = False) -> None:
    """
    Check the content or type of the value; when the check fails, throw error.

    Parameters
    ----------
    value : Check object.
    targets : Correct target, can be type.
        - Any : Check whether it is the target.
        - Literal['_iterable'] : Check whether it can be iterable.
    check_element : Whether check each element in value instead.
    """
    elements = value if check_element else [value]
    allow_iterable = "_iterable" in targets
    for element in elements:
        # An element passes if it is iterable (when allowed), if its type is
        # a target, or if it is one of the targets by identity.
        if allow_iterable and is_iterable(element):
            continue
        if type(element) in targets:
            continue
        if any(element is target for target in targets):
            continue
        # Build the failure message with the parameter's source name if known.
        var_name = get_name(value)
        if var_name != None:
            var_name = " '%s'" % var_name
        else:
            var_name = ""
        correct_targets_str = ", ".join([repr(target) for target in targets])
        if check_element:
            error_text = "parameter%s the elements content or type must in [%s], now: %s" % (var_name, correct_targets_str, repr(value))
        else:
            error_text = "parameter%s the content or type must in [%s], now: %s" % (var_name, correct_targets_str, repr(value))
        error(error_text, ValueError)
def check_least_one(*values: Any) -> None:
    """
    Check that at least one of multiple values is not None; when the check
    fails, throw error.

    Parameters
    ----------
    values : Check values.

    Raises
    ------
    ValueError : When every value is None.
    """
    for value in values:
        # Fixed: 'is not None' instead of '!= None', which invokes custom
        # __eq__ and misbehaves for values such as numpy arrays.
        if value is not None:
            return
    vars_name = get_name(values)
    if vars_name is not None:
        # De-duplicate names while preserving their original order.
        vars_name_de_dup = list(set(vars_name))
        vars_name_de_dup.sort(key=vars_name.index)
        vars_name_str = " " + " and ".join(["\"%s\"" % var_name for var_name in vars_name_de_dup])
    else:
        vars_name_str = ""
    error_text = "at least one of parameters%s is not None" % vars_name_str
    error(error_text, ValueError)
def check_only_one(*values: Any) -> None:
    """
    Check that at most one of multiple values is not None; when the check
    fails, throw error.

    Parameters
    ----------
    values : Check values.

    Raises
    ------
    ValueError : When more than one value is not None.
    """
    # Count not-None values (original local 'none_count' was misleading).
    notnull_count = 0
    for value in values:
        # Fixed: 'is not None' instead of '!= None', which invokes custom
        # __eq__ and misbehaves for values such as numpy arrays.
        if value is not None:
            notnull_count += 1
    if notnull_count > 1:
        vars_name = get_name(values)
        if vars_name is not None:
            # De-duplicate names while preserving their original order.
            vars_name_de_dup = list(set(vars_name))
            vars_name_de_dup.sort(key=vars_name.index)
            vars_name_str = " " + " and ".join(["\"%s\"" % var_name for var_name in vars_name_de_dup])
        else:
            vars_name_str = ""
        error_text = "at most one of parameters%s is not None" % vars_name_str
        error(error_text, ValueError)
def is_iterable(obj: Any, exclude_types: Iterable[Type] = [str, bytes]) -> bool:
    """
    Judge whether the object is iterable.

    Parameters
    ----------
    obj : Judge object.
    exclude_types : Types treated as non-iterable even though they iterate.

    Returns
    -------
    Judgment result.
    """
    if type(obj) in exclude_types:
        return False
    # Objects without __dir__ support (rare) are treated as non-iterable.
    try:
        attributes = obj.__dir__()
    except TypeError:
        return False
    return "__iter__" in attributes
def is_table(obj: Any, check_fields: bool = True) -> bool:
    """
    Judge whether it is List[Dict] table format, optionally requiring that
    keys and key order of every Dict are the same.

    Parameters
    ----------
    obj : Judge object.
    check_fields : Whether to require identical keys and key order per row.

    Returns
    -------
    Judgment result.
    """
    if type(obj) != list:
        return False
    if not all(type(row) == dict for row in obj):
        return False
    if check_fields:
        # One signature string per row; a uniform table yields exactly one.
        signatures = {
            ":".join(str(key) for key in row.keys())
            for row in obj
        }
        if len(signatures) != 1:
            return False
    return True
def is_number_str(text: str, return_value: bool = False) -> Union[bool, int, float]:
    """
    Judge whether the text is a number string.

    Parameters
    ----------
    text : Judge text.
    return_value : Whether return the converted value instead of True.

    Returns
    -------
    Judgment result or converted value.
    """
    # A decimal point selects float conversion, otherwise int.
    converter = float if "." in text else int
    try:
        number = converter(text)
    except ValueError:
        return False
    return number if return_value else True
def get_first_notnull(*values: Any, default: Optional[Union[Any, Literal["error"]]] = None, none_values: List = [None]) -> Any:
    """
    Get the first value that is not null.

    Parameters
    ----------
    values : Check values.
    default : When all are null, then return this value, or throw error.
        - Any : Return this value.
        - Literal['error'] : Throw error.
    none_values : Values regarded as null.

    Returns
    -------
    First not-null value, or the default when all are null.
    """
    for value in values:
        if value not in none_values:
            return value
    # All values were null.
    if default != "error":
        return default
    vars_name = get_name(values)
    if vars_name != None:
        # De-duplicate names while preserving their original order.
        vars_name_de_dup = list(set(vars_name))
        vars_name_de_dup.sort(key=vars_name.index)
        vars_name_str = " " + " and ".join(["\"%s\"" % var_name for var_name in vars_name_de_dup])
    else:
        vars_name_str = ""
    error_text = "at least one of parameters%s is not None" % vars_name_str
    error(error_text, ValueError)
def ins(obj: Any, *arrays: Iterable) -> bool:
    """
    Judge whether the object is in any of multiple arrays.

    Parameters
    ----------
    obj : Judge object.
    arrays : Arrays.

    Returns
    -------
    Judge result.
    """
    return any(obj in array for array in arrays)
def mutual_in(*arrays: Iterable) -> bool:
    """
    Whether the same element exists in more than one array.

    Parameters
    ----------
    arrays : Arrays.

    Returns
    -------
    Judge result.
    """
    pool = list(arrays)
    for index, array in enumerate(pool):
        # Compare each array against every later one (inlined membership
        # test, equivalent to the ins helper).
        for later_array in pool[index + 1:]:
            for element in array:
                if element in later_array:
                    return True
    return False
def to_type(obj: Any, to_type: Type, method: Optional[Callable] = None) -> Any:
    """
    Convert object type.

    Parameters
    ----------
    obj : Convert object.
    to_type : Target type.
    method : Convert method.
        - None : Call the target type itself.
        - Callable : Use this method.

    Returns
    -------
    Converted object.
    """
    # Already the target type: return unchanged.
    if type(obj) == to_type:
        return obj
    converter = method if method is not None else to_type
    return converter(obj)
def get_name(obj: Any, frame: int = 2) -> Optional[Union[str, Tuple[str, ...]]]:
    """
    Get object name from the calling source code.

    Parameters
    ----------
    obj : Object.
    frame : Number of code levels to walk upward.

    Returns
    -------
    Object name(s) or None when the name cannot be resolved.
    """
    # Prefer the object's own __name__; fall back to the literal 'obj'.
    try:
        name = obj.__name__
    except AttributeError:
        name = "obj"
    # Resolve the argument name through each stack level; any failure
    # (including a missing varname dependency) yields None.
    try:
        for level in range(1, frame + 1):
            name = argname(name, frame=level)
        if type(name) != str and "".join(name) == "":
            name = None
    except:
        name = None
    return name
from typing import Any, Tuple, Callable, Optional, Union
from tqdm import tqdm as tqdm_tqdm
from threading import Thread
from functools import wraps as functools_wraps
from .rcommon import exc
from .rtext import print_frame
from .rdatetime import rdatetimeMark, now
def wrap_frame(decorator: Callable) -> Callable:
    """
    Decorative frame.

    Parameters
    ----------
    decorator : Decorator function.

    Retuens
    -------
    Decorator after decoration.

    Examples
    --------
    Decoration function method one.

    >>> @wrap_func
    >>> def func(): ...
    >>> func_ret = func(param_a, param_b, param_c=1, param_d=2)

    Decoration function method two.

    >>> def func(): ...
    >>> func_ret = wrap_func(func, param_a, param_b, param_c=1, param_d=2)

    Decoration function method three.

    >>> def func(): ...
    >>> func_ret = wrap_func(func, _execute=True)

    Decoration function method four.

    >>> def func(): ...
    >>> func = wrap_func(func)
    >>> func_ret = func(param_a, param_b, param_c=1, param_d=2)

    Decoration function method five.

    >>> def func(): ...
    >>> func = wrap_func(func, param_a, param_c=1, _execute=False)
    >>> func_ret = func(param_b, param_d=2)
    """
    @functools_wraps(decorator)
    def wrap(func: Callable, *args: Any, _execute: Optional[bool] = None, **kwargs: Any) -> Union[Callable, Any]:
        """
        Decorative shell.

        Parameters
        ----------
        _execute : Whether execute function, otherwise decorate function.
            - None : When parameter *args or **kwargs have values, then True, otherwise False.
            - bool : Use this value.

        Returns
        -------
        Function after decoration or return of function.
        """
        # Default: the presence of call arguments means "execute now".
        if _execute == None:
            if args or kwargs:
                _execute = True
            else:
                _execute = False
        if _execute:
            # Immediate mode: run the decorated call and return its result.
            func_ret = decorator(func, *args, **kwargs)
            return func_ret
        else:
            # Deferred mode: return a wrapper that merges the pre-bound
            # arguments with those supplied at call time.
            @functools_wraps(func)
            def wrap_sub(*_args: object, **_kwargs: object) -> object:
                """
                Decorative sub shell.
                """
                func_ret = decorator(func, *args, *_args, **kwargs, **_kwargs)
                return func_ret
            return wrap_sub
    return wrap
def wraps(*wrap_funcs: Callable) -> Callable:
    """
    Batch decorate.

    Parameters
    ----------
    wrap_funcs : Decorator functions, applied in the given order.

    Returns
    -------
    Function after decoration.

    Examples
    --------
    Decoration function.

    >>> @wraps(print_funtime, state_thread)
    >>> def func(): ...
    >>> func_ret = func()

    Same up and down.

    >>> def func(): ...
    >>> func = print_funtime(func)
    >>> func = state_thread(func)
    >>> func_ret = func()
    """
    def wrap(func: Callable) -> Callable:
        """
        Decorative shell, chaining every decorator around the function.
        """
        # Bug fix: the original rebound one closure variable per loop turn,
        # so (by late binding) only the LAST decorator ever took effect.
        # Delegating to a helper binds each decorator at definition time.
        for wrap_func in wrap_funcs:
            func = _wrap_one(wrap_func, func)
        return func
    return wrap

def _wrap_one(wrap_func: Callable, func: Callable) -> Callable:
    """
    Bind one decorator around one function (early binding).
    """
    @functools_wraps(func)
    def wrap_sub(*args: Any, **kwargs: Any) -> Any:
        """
        Decorative sub shell.
        """
        return wrap_func(func, *args, _execute=True, **kwargs)
    return wrap_sub
@wrap_frame
def runtime(func: Callable, *args: Any, _ret_report: bool = False, **kwargs: Any) -> Union[Any, Tuple[Any, str]]:
    """
    Print or return runtime report of the function.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _ret_report : Whether return report, otherwise print report.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function run result, or a tuple of (result, report).
    """
    # Mark the start time, run the function, then mark the end time.
    marker = rdatetimeMark()
    result = func(*args, **kwargs)
    marker.mark()
    # The recorded interval is in milliseconds; convert to seconds.
    elapsed_seconds = marker.record[-1]["interval_timestamp"] / 1000
    report = "Start: %s -> Spend: %ss -> End: %s" % (
        marker.record[0]["datetime_str"],
        elapsed_seconds,
        marker.record[1]["datetime_str"]
    )
    if _ret_report:
        return result, report
    print_frame(report, title=func.__name__)
    return result
@wrap_frame
def start_thread(func: Callable, *args: Any, _daemon: bool = True, **kwargs: Any) -> Thread:
    """
    Function start in thread.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _daemon : Whether it is a daemon thread.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Thread object (already started).
    """
    # Name the thread after the function plus the current timestamp.
    name = "%s_%d" % (func.__name__, now("timestamp"))
    worker = Thread(target=func, name=name, args=args, kwargs=kwargs, daemon=_daemon)
    worker.start()
    return worker
@wrap_frame
def try_exc(
    func: Callable,
    *args: Any,
    **kwargs: Any
) -> Union[None, Any]:
    """
    Execute function with 'try' syntax and print error information.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function run result, or None when an exception was caught.
    """
    try:
        return func(*args, **kwargs)
    # Bug fix: a bare 'except:' also swallowed KeyboardInterrupt and
    # SystemExit; catch Exception so control exceptions still propagate.
    except Exception:
        exc(func.__name__)
@wrap_frame
def update_tqdm(
    func: Callable,
    tqdm: tqdm_tqdm,
    *args: Any,
    _desc: Optional[str] = None,
    _step: Union[int, float] = 1,
    **kwargs: Any
) -> Any:
    """
    Update progress bar tqdm object of tqdm package.

    Parameters
    ----------
    func : Function to be decorated.
    tqdm : Progress bar tqdm object.
    args : Position parameter of input parameter decorated function.
    _desc : Progress bar description.
        - None : No description.
        - str : Add description.
    _step : Progress bar step size.
        - When greater than 0, then forward.
        - When less than 0, then backward.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function run result.
    """
    # 'is not None' identity test instead of '!= None' equality (PEP 8).
    if _desc is not None:
        tqdm.set_description(_desc)
    func_ret = func(*args, **kwargs)
    # Advance (or rewind, when _step is negative) the bar after the call.
    tqdm.update(_step)
    return func_ret
from typing import Any, List, Dict, Iterable, Optional, Literal, Union
import re
from sqlalchemy import create_engine, text
from sqlalchemy.engine.base import Engine, Connection, Transaction
from sqlalchemy.sql.elements import TextClause
# Version compatible of package sqlalchemy.
try:
from sqlalchemy import CursorResult
except ImportError:
from sqlalchemy.engine.cursor import LegacyCursorResult as CursorResult
from .rbasic import get_first_notnull, error
from .rdata import to_table, to_df, to_json, to_sql, to_html, to_csv, to_excel
from .roption import print_default_frame_full
from .rtext import rprint
from .rdatetime import now
from .rwrap import runtime
def monkey_patch_more_fetch() -> None:
    """
    Add more fetch methods to the CursorResult class of the sqlalchemy package.
    """
    # Fetch SQL result to table in List[Dict] format.
    CursorResult.fetch_table = to_table
    # Fetch SQL result to DataFrame object.
    CursorResult.fetch_df = to_df
    # Fetch SQL result to JSON string.
    CursorResult.fetch_json = to_json
    # Fetch SQL result to SQL string.
    CursorResult.fetch_sql = to_sql
    # Fetch SQL result to HTML string.
    # Bug fix: this was assigned to 'fetch_sql', silently overwriting the
    # SQL fetcher and leaving 'fetch_html' undefined.
    CursorResult.fetch_html = to_html
    # Fetch SQL result to save csv format file.
    CursorResult.fetch_csv = to_csv
    # Fetch SQL result to save excel file.
    CursorResult.fetch_excel = to_excel
monkey_patch_more_fetch()
class RConnect(object):
    """
    Rey's database connection type, based on the package sqlalchemy.

    Holds default connection parameters and a cached Connection object,
    recreating it transparently when it becomes stale or closed.
    """

    # Values to be converted to None when filling SQL parameters.
    none_values: List = ["", " ", b"", [], (), {}, set()]

    def __init__(
        self,
        user: Optional[str] = None,
        password: Optional[str] = None,
        host: Optional[str] = None,
        port: Optional[str] = None,
        database: Optional[str] = None,
        charset: Optional[str] = None,
        autocommit: bool = True,
        conn: Optional[Union[Engine, Connection]] = None,
        recreate_ms: int = 7_200_000
    ) -> None:
        """
        Set database connection parameters.

        Parameters
        ----------
        user : Server user name.
        password : Server password.
        host : Server host.
        port : Server port.
        database : Database name in the server.
        charset : Coding charset.
        autocommit : Whether the auto commit for execution.
        conn : Existing connection object, will get parameters from it, but preferred input parameters.
        recreate_ms : Connection object recreate interval millisecond.
        """
        # Get parameters from existing connection object.
        # A Connection is reduced to its Engine first, then the Engine's URL
        # supplies any parameter not given explicitly.
        if type(conn) == Connection:
            conn = conn.engine
        if type(conn) == Engine:
            user = get_first_notnull(user, conn.url.username)
            password = get_first_notnull(password, conn.url.password)
            host = get_first_notnull(host, conn.url.host)
            port = get_first_notnull(port, conn.url.port)
            database = get_first_notnull(database, conn.url.database)
            charset = get_first_notnull(charset, conn.url.query.get("charset"))
            conn = conn.connect()
        # Set parameters.
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.database = database
        self.charset = charset
        self.autocommit = autocommit
        self.conn = conn
        # Active Transaction object; None when no transaction is open.
        self.begin = None
        # Creation time of the connection, compared against 'recreate_ms'
        # in connect() to decide when to recreate a stale connection.
        self.conn_timestamp = now("timestamp")
        self.recreate_ms = recreate_ms
def commit(self) -> None:
"""
Commit cumulative executions.
"""
# Commit.
if self.begin != None:
self.begin.commit()
self.begin = None
def rollback(self) -> None:
"""
Rollback cumulative executions.
"""
# Rollback.
if self.begin != None:
self.begin.rollback()
self.begin = None
def close(self) -> None:
"""
Close database connection.
"""
# Close.
if self.conn != None:
self.conn.close()
self.conn = None
self.begin = None
    def connect(
        self,
        user: Optional[str] = None,
        password: Optional[str] = None,
        host: Optional[str] = None,
        port: Optional[Union[str, int]] = None,
        database: Optional[str] = None,
        charset: Optional[str] = None
    ) -> Connection:
        """
        Get database connection object, reusing the cached one when possible.

        Parameters
        ----------
        user : Server user name.
        password : Server password.
        host : Server host.
        port : Server port.
        database : Database name in the server.
        charset : Coding charset.

        Returns
        -------
        Connection object.
        """
        # Check whether the connection object is invalid:
        # older than 'recreate_ms' milliseconds, or already closed.
        if self.conn != None \
            and (
                now("timestamp") > self.conn_timestamp + self.recreate_ms \
                or self.conn.closed
            ):
            self.close()
        # Judge whether existing connection objects can be reused:
        # every explicitly supplied parameter must match the cached
        # connection's URL (a None parameter matches anything).
        elif self.conn != None \
            and (user == None or self.conn.engine.url.username == user) \
            and (password == None or self.conn.engine.url.password == password) \
            and (host == None or self.conn.engine.url.host == host) \
            and (port == None or self.conn.engine.url.port == port) \
            and (database == None or self.conn.engine.url.database == database) \
            and (charset == None or self.conn.engine.url.query["charset"] == charset):
            return self.conn
        # Get parameters by priority: explicit argument, then instance default.
        user: str = get_first_notnull(user, self.user, default="error")
        password: str = get_first_notnull(password, self.password, default="error")
        host: str = get_first_notnull(host, self.host, default="error")
        port: Union[str, int] = get_first_notnull(port, self.port, default="error")
        database: str = get_first_notnull(database, self.database, default="error")
        charset: str = get_first_notnull(charset, self.charset, default="utf8")
        # Create connection object.
        # Prefer the mysqldb driver, falling back to pymysql when it is
        # not installed.
        try:
            url = f"mysql+mysqldb://{user}:{password}@{host}:{port}/{database}?charset={charset}"
            engine = create_engine(url)
        except ModuleNotFoundError:
            url = f"mysql+pymysql://{user}:{password}@{host}:{port}/{database}?charset={charset}"
            engine = create_engine(url)
        conn = engine.connect()
        # Save connection object and its creation time.
        self.conn = conn
        self.conn_timestamp = now("timestamp")
        return conn
def file_data_by_sql(
self,
sql: Union[str, TextClause],
params: Union[Dict, List[Dict]],
fill_field: bool = True,
none_values: List = none_values
) -> List[Dict]:
"""
Fill missing parameters according to contents of sqlClause object of sqlalchemy module, and filter out empty Dict.
Parameters
----------
sql : SQL in sqlalchemy.text format or return of sqlalchemy.text.
params : Parameters set for filling sqlalchemy.text.
fill_field : Whether fill missing fields.
none_values : Values to be converted to None.
Returns
-------
Filled parameters.
"""
# Handle parameters.
if type(params) == dict:
params = [params]
# Filter out empty Dict.
params = [
param
for param in params
if param != {}
]
# Extract fill field names.
if type(sql) == TextClause:
sql = sql.text
pattern = "(?<!\\\):(\w+)"
sql_keys = re.findall(pattern, sql)
# Fill.
for param in params:
for key in sql_keys:
if fill_field:
val = param.get(key)
else:
val = param[key]
if val in none_values:
val = None
param[key] = val
return params
def execute(
self,
sql: Union[str, TextClause],
params: Optional[Union[List[Dict], Dict]] = None,
database: Optional[str] = None,
fill_field: bool = True,
none_values: List = none_values,
autocommit: Optional[bool] = None,
report: bool = False,
**kw_params: Any
) -> CursorResult:
"""
Execute SQL.
Parameters
----------
sql : SQL in sqlalchemy.text format or return of sqlalchemy.text.
params : Parameters set for filling sqlalchemy.text.
database : Database name.
fill_field : Whether fill missing fields.
none_values : Values to be converted to None.
autocommit : Whether the auto commit for execution.
report : Whether print SQL and SQL runtime.
kw_params : Keyword parameters for filling sqlalchemy.text.
Returns
-------
CursorResult object of alsqlchemy package.
"""
# Get parameters by priority.
autocommit = get_first_notnull(autocommit, self.autocommit, default=True)
# Handle parameters.
if type(sql) == str:
sql = text(sql)
if params != None:
if type(params) == dict:
params = [params]
else:
params = params.copy()
for param in params:
param.update(kw_params)
else:
params = [kw_params]
params = self.file_data_by_sql(sql, params, fill_field, none_values)
# Get Connection object.
conn = self.connect(database=database)
# Get Transaction object.
if self.begin == None:
self.begin = conn.begin()
# Execute SQL.
if report:
result, report_runtime = runtime(conn.execute, sql, params, _ret_report=True)
report_info = "%s\nRow Count: %d" % (report_runtime, result.rowcount)
if params != None:
rprint(report_info, sql, title="SQL", frame=print_default_frame_full)
else:
rprint(report_info, sql, params, title="SQL", frame=print_default_frame_full)
else:
result = conn.execute(sql, params)
# Commit execute.
if autocommit:
self.commit()
return result
def execute_select(
self,
table: str,
database: Optional[str] = None,
fields: Optional[Union[str, Iterable]] = None,
where: Optional[str] = None,
order: Optional[str] = None,
limit: Optional[Union[int, str, Iterable[Union[int, str]]]] = None,
report: bool = False
) -> CursorResult:
"""
Execute select SQL.
Parameters
----------
table : Table name.
database : Database name.
fields : Select clause content.
- None : Is 'SELECT *'.
- str : Join as 'SELECT str'.
- Iterable[str] : Join as 'SELECT \`str\`, ...'.
where : 'WHERE' clause content, join as 'WHERE str'.
order : 'ORDER BY' clause content, join as 'ORDER BY str'.
limit : 'LIMIT' clause content.
- Union[int, str] : Join as 'LIMIT int/str'.
- Iterable[Union[str, int]] with length of 1 or 2 : Join as 'LIMIT int/str, ...'.
report : Whether print SQL and SQL runtime.
Returns
-------
CursorResult object of alsqlchemy package.
"""
# Handle parameters.
sqls = []
if database == None:
_database = self.database
else:
_database = database
if fields == None:
fields = "*"
elif type(fields) != str:
fields = ",".join(["`%s`" % field for field in fields])
# Generate SQL.
select_sql = (
f"SELECT {fields}\n"
f"FROM `{_database}`.`{table}`"
)
sqls.append(select_sql)
if where != None:
where_sql = "WHERE %s" % where
sqls.append(where_sql)
if order != None:
order_sql = "ORDER BY %s" % order
sqls.append(order_sql)
if limit != None:
list_type = type(limit)
if list_type in [str, int]:
limit_sql = f"LIMIT {limit}"
else:
if len(limit) in [1, 2]:
limit_content = ",".join([str(val) for val in limit])
limit_sql = "LIMIT %s" % limit_content
else:
error("The length of the limit parameter value must be 1 or 2", ValueError)
sqls.append(limit_sql)
sql = "\n".join(sqls)
# Execute SQL.
result = self.execute(sql, database=database, report=report)
return result
def execute_update(
self,
data: Union[CursorResult, List[Dict], Dict],
table: str,
database: Optional[str] = None,
where_fields: Optional[Union[str, Iterable[str]]] = None,
report: bool = False
) -> Union[None, CursorResult]:
"""
Update the data of table in the datebase.
Parameters
----------
data : Updated data.
table : Table name.
database : Database name.
where_fields : 'WHERE' clause content.
- None : The first key value pair of each item is judged.
- str : This key value pair of each item is judged.
- Iterable[str] : Multiple judged, 'and' relationship.
report : Whether print SQL and SQL runtime.
Returns
-------
None or CursorResult object.
- None : When the data is empty.
- CursorResult object : When the data is not empty.
"""
# If data is empty, not execute.
if data in ({}, [], [{}]):
return
# Handle parameters.
data_type = type(data)
if data_type == CursorResult:
data = to_table(data)
elif data_type == dict:
data = [data]
if database == None:
_database = self.database
else:
_database = database
# Generate SQL.
data_flatten = {}
sqls = []
if where_fields == None:
no_where = True
else:
no_where = False
if type(where_fields) == str:
where_fields = [where_fields]
for index, row in enumerate(data):
for key, val in row.items():
index_key = "%d_%s" % (index, key)
data_flatten[index_key] = val
if no_where:
where_fields = [list(row.keys())[0]]
set_content = ",".join(
[
"`%s` = :%d_%s" % (key, index, key)
for key in row
if key not in where_fields
]
)
where_content = "\n AND ".join(
[
f"`{field}` = :{index}_{field}"
for field in where_fields
]
)
sql = (
f"UPDATE `{_database}`.`{table}`\n"
f"SET {set_content}\n"
f"WHERE {where_content}"
)
sqls.append(sql)
sqls = ";\n".join(sqls)
# Execute SQL.
result = self.execute(sqls, data_flatten, database, once=False, report=report)
return result
def execute_insert(
self,
data: Union[CursorResult, List[Dict], Dict],
table: str,
database: Optional[str] = None,
duplicate_method: Optional[Literal["ignore", "update"]] = None,
report: bool = False
) -> Union[None, CursorResult]:
"""
Insert the data of table in the datebase.
Parameters
----------
data : Updated data.
table : Table name.
database : Database name.
duplicate_method : Handle method when constraint error.
- None : Not handled.
- 'ignore' : Use 'UPDATE IGNORE INTO' clause.
- 'update' : Use 'ON DUPLICATE KEY UPDATE' clause.
report : Whether print SQL and SQL runtime.
Returns
-------
None or CursorResult object.
- None : When the data is empty.
- CursorResult object : When the data is not empty.
"""
# If data is empty, not execute.
if data in ({}, [], [{}]):
return
# Handle parameters.
data_type = type(data)
if data_type == CursorResult:
data = self.to_table(data)
elif data_type == dict:
data = [data]
if database == None:
_database = self.database
else:
_database = database
# Generate SQL.
fields = list({key for row in data for key in row})
fields_str = ",".join(["`%s`" % field for field in fields])
fields_str_position = ",".join([":" + field for field in fields])
if duplicate_method == "ignore":
sql = (
f"INSERT IGNORE INTO `{_database}`.`{table}`({fields_str})\n"
f"VALUES({fields_str_position})"
)
elif duplicate_method == "update":
update_content = ",".join(["`%s` = VALUES(`%s`)" % (field, field) for field in fields])
sql = (
f"INSERT INTO `{_database}`.`{table}`({fields_str})\n"
f"VALUES({fields_str_position})\n"
"ON DUPLICATE KEY UPDATE\n"
f"{update_content}"
)
else:
sql = (
f"INSERT INTO `{_database}`.`{table}`({fields_str})\n"
f"VALUES({fields_str_position})"
)
# Execute SQL.
result = self.execute(sql, data, database, report=report)
return result | /reyflask-0.2-py3-none-any.whl/reytool/rdatabase.py | 0.870748 | 0.170715 | rdatabase.py | pypi |
from typing import Any, List, Tuple, Dict, Iterable, Literal, Optional, Union, Type
from pandas import DataFrame, ExcelWriter
# Version compatible of package sqlalchemy.
try:
from sqlalchemy import CursorResult
except ImportError:
from sqlalchemy.engine.cursor import LegacyCursorResult as CursorResult
from .rbasic import is_iterable, check_least_one, to_type
from .rdatetime import time_to_str
def to_table(
    data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]],
    fields: Optional[Iterable] = None
) -> List[Dict]:
    """
    Fetch data to table in List[Dict] format, keys and keys sort of the
    dictionary are the same.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - None : Infer.
        - Iterable : Use values in Iterable.

    Returns
    -------
    Table in List[Dict] format.
    """
    data_type = type(data)
    if data_type == CursorResult:
        if fields is None:
            fields = data.keys()
        data_table = [dict(zip(fields, row)) for row in data]
    elif data_type == DataFrame:
        data_df = data.copy()
        if fields is not None:
            data_df.columns = fields
        # Replace pandas missing-value markers (NaN/NaT) with None.
        data_df = data_df.where(data_df.notnull(), None)
        data_table = data_df.to_dict("records")
    else:
        data_df = DataFrame(data, columns=fields)
        # Bug fix: 'notnull' must be called on the constructed DataFrame,
        # not on the raw iterable 'data' (which has no such attribute and
        # raised AttributeError on this path).
        data_df = data_df.where(data_df.notnull(), None)
        data_table = data_df.to_dict("records")
    return data_table
def to_df(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> DataFrame:
    """
    Fetch data to table of DataFrame object.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - None : Infer.
        - Iterable : Use values in Iterable.

    Returns
    -------
    DataFrame object.
    """
    data_type = type(data)
    if data_type == CursorResult:
        if fields is None:
            fields = data.keys()
        data_df = DataFrame(data, columns=fields)
    elif data_type == DataFrame:
        data_df = data.copy()
        if fields is not None:
            data_df.columns = fields
    else:
        data_df = DataFrame(data, columns=fields)
    # Bug fix: single exit point — the original's final 'return' sat inside
    # the 'else' branch, so the CursorResult path fell through and
    # implicitly returned None.
    return data_df
def to_json(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to JSON string.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - None : Infer.
        - Iterable : Use values in Iterable.

    Returns
    -------
    JSON string.
    """
    # Convert through a DataFrame, then serialize one object per record
    # without ASCII escaping (keeps non-ASCII text readable).
    frame = to_df(data, fields)
    return frame.to_json(orient="records", force_ascii=False)
def to_sql(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to SQL string, as a chain of 'SELECT ... UNION ALL' rows.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - None : Infer.
        - Iterable : Use values in Iterable.

    Returns
    -------
    SQL string.
    """
    data_type = type(data)
    if data_type == CursorResult:
        if fields == None:
            fields = data.keys()
    else:
        # Non-cursor input is normalized to List[Dict] first; fields are
        # taken from the first row.
        data = to_table(data, fields)
        fields = data[0].keys()
    # Render every value as a quoted literal (datetimes stringified),
    # mapping None to the SQL NULL keyword.
    # NOTE(review): for List[Dict] input, iterating 'row' yields the dict
    # KEYS, not the values — confirm this path produces the intended SQL.
    sql_rows_values = [
        [
            repr(time_to_str(val, "%Y-%m-%d %H:%M:%S"))
            if val != None
            else "NULL"
            for val in row
        ]
        for row in data
    ]
    sql_rows = [
        "SELECT " + ",".join(row_values)
        for row_values in sql_rows_values
    ]
    # The first row carries the column aliases ('value AS `field`').
    sql_row_first = "SELECT " + ",".join(
        [
            "%s AS `%s`" % (val, key)
            for key, val in list(zip(fields, sql_rows_values[0]))
        ]
    )
    sql_rows[0] = sql_row_first
    data_sql = " UNION ALL ".join(sql_rows)
    return data_sql
def to_html(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to HTML string.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - None : Infer.
        - Iterable : Use values in Iterable.

    Returns
    -------
    HTML string.
    """
    # Convert through a DataFrame, then render a centered table without
    # the index column.
    frame = to_df(data, fields)
    return frame.to_html(col_space=50, index=False, justify="center")
def to_csv(
    data: Union[CursorResult, DataFrame, Iterable[Dict], Iterable],
    path: str = "table.csv",
    fields: Optional[Iterable] = None
) -> DataFrame:
    """
    Fetch data to save csv format file.

    Parameters
    ----------
    data : Data.
    path : File save path.
    fields : Table fields.
        - None : Infer.
        - Iterable : Use values in Iterable.

    Returns
    -------
    DataFrame object that was written.
    """
    frame = to_df(data, fields)
    # Append mode: repeated calls accumulate rows in the same file.
    frame.to_csv(path, mode="a")
    return frame
def to_excel(
    data: Union[CursorResult, DataFrame, Iterable[Dict], Iterable],
    path: str = "table.xlsx",
    group_field: Optional[str] = None,
    sheets_set: Dict[Union[str, int], Dict[Literal["name", "index", "filter"], Union[str, int, List[str]]]] = {}
) -> List[Tuple[str, DataFrame]]:
    """
    Fetch data to save excel format file and return sheet name and sheet data.

    Parameters
    ----------
    data : Data.
    path : File save path.
    group_field : Group field; each group becomes a separate sheet.
    sheets_set : Set sheet new name and sort sheet and filter sheet fields,
        key is old name or index, value is set parameters.
        - Parameter 'name' : Set sheet new name.
        - Parameter 'index' : Sort sheet.
        - Parameter 'filter' : Filter sheet fields.

    Returns
    -------
    Sheet name and sheet data.
    """
    # NOTE(review): 'sheets_set' has a mutable default ({}); it is only read
    # here, never mutated, so the shared default is harmless — but confirm
    # before adding any mutation.
    if type(data) != DataFrame:
        data = to_df(data)
    # Without a group field, everything goes into a single default sheet.
    if group_field == None:
        data_group = (("Sheet1", data),)
    else:
        data_group = data.groupby(group_field)
    # Sheets with an explicit 'index' are collected separately and sorted;
    # the rest keep their original order and are appended after.
    sheets_table_before = []
    sheets_table_after = []
    for index, sheet_table in enumerate(data_group):
        sheet_name, sheet_df = sheet_table
        # The grouping column itself is dropped from each sheet.
        if group_field != None:
            del sheet_df[group_field]
        # Settings may be keyed by sheet name or positional index.
        if sheet_name in sheets_set:
            sheet_set = sheets_set[sheet_name]
        elif index in sheets_set:
            sheet_set = sheets_set[index]
        else:
            sheets_table_after.append((sheet_name, sheet_df))
            continue
        if "name" in sheet_set:
            sheet_name = sheet_set["name"]
        if "filter" in sheet_set:
            sheet_df = sheet_df[sheet_set["filter"]]
        if "index" in sheet_set:
            sheets_table_before.append((sheet_set["index"], (sheet_name, sheet_df)))
        else:
            sheets_table_after.append((sheet_name, sheet_df))
    # Order the indexed sheets by their requested position.
    sort_func = lambda item: item[0]
    sheets_table_before.sort(key=sort_func)
    sheets_table = [sheet_table for sheet_index, sheet_table in sheets_table_before] + sheets_table_after
    # Write every sheet into one workbook.
    excel = ExcelWriter(path)
    for sheet_name, sheet_df in sheets_table:
        sheet_df.to_excel(excel, sheet_name, index=False)
    excel.close()
    return sheets_table
def count(
    data: Any,
    count_value: Optional[Dict] = None,
    surface: bool = True
) -> Dict[Literal["size", "total", "types"], Union[int, Dict[Type, int]]]:
    """
    Count data element.

    Parameters
    ----------
    data : Data.
    count_value : Cumulative count, threaded through recursive calls.
        - None : Start a fresh count.
    surface : Whether is surface recursion.

    Returns
    -------
    Count data with keys 'size' (leaf count), 'total' (all nodes) and
    'types' (frequency per type, sorted descending on the surface call).
    """
    # Bug fix: the original used a mutable default argument ({...}), which
    # is shared across calls, so counts from earlier calls leaked into
    # later ones.
    if count_value is None:
        count_value = {"size": 0, "total": 0, "types": {}}
    data_type = type(data)
    count_value["total"] += 1
    count_value["types"][data_type] = count_value["types"].get(data_type, 0) + 1
    if data_type == dict:
        for element in data.values():
            count(element, count_value, False)
    elif is_iterable(data):
        for element in data:
            count(element, count_value, False)
    else:
        # Leaf element.
        count_value["size"] += 1
    if surface:
        # Sort the type statistics by frequency, descending.
        sorted_keys = sorted(count_value["types"], key=lambda key: count_value["types"][key], reverse=True)
        count_value["types"] = {key: count_value["types"][key] for key in sorted_keys}
    return count_value
def flatten(data: Any, flattern_data: Optional[List] = None) -> List:
    """
    Flatten data into a list of leaf elements.

    Parameters
    ----------
    data : Data.
    flattern_data : Accumulator list, threaded through recursive calls.
        - None : Start a fresh list.

    Returns
    -------
    Flattened list.
    """
    # Bug fix: the original used a mutable default argument ([]), which is
    # shared across calls, so results from earlier calls leaked into
    # later ones.
    if flattern_data is None:
        flattern_data = []
    data_type = type(data)
    if data_type == dict:
        for element in data.values():
            flatten(element, flattern_data)
    elif is_iterable(data):
        for element in data:
            flatten(element, flattern_data)
    else:
        flattern_data.append(data)
    return flattern_data
def split(data: Iterable, bin_size: Optional[int] = None, share: int = 2) -> List[List]:
    """
    Split data into multiple lists.

    Parameters
    ----------
    data : Data.
    bin_size : Size of each bin; when given, takes precedence over 'share'.
    share : Number of nearly equal bins when 'bin_size' is None.

    Returns
    -------
    List of bins.
    """
    check_least_one(bin_size, share)
    data = list(data)
    data_len = len(data)
    # Bug fix: the original reused one variable ('_data') as both the result
    # list and the current chunk, so each chunk was appended into itself and
    # the function returned garbage.
    bins = []
    consumed = 0
    if bin_size is None:
        # Split into 'share' nearly equal parts (sizes differ by at most 1).
        average = data_len / share
        for n in range(share):
            part_size = int(average * (n + 1)) - int(average * n)
            bins.append(data[consumed:consumed + part_size])
            consumed += part_size
    else:
        # Split into fixed-size chunks; the last chunk may be shorter.
        # (Also fixes the original's trailing empty chunk when data_len is
        # an exact multiple of bin_size.)
        while consumed < data_len:
            bins.append(data[consumed:consumed + bin_size])
            consumed += bin_size
    return bins
def de_dup(data: Iterable) -> List:
    """
    De duplication of data, preserving first-seen order.

    Parameters
    ----------
    data : Data (elements must be hashable).

    Returns
    -------
    List after de duplication.
    """
    # dict preserves insertion order (Python 3.7+), giving an O(n)
    # order-preserving de-duplication instead of the original O(n^2)
    # 'set then sort by first index' approach. Result is identical.
    return list(dict.fromkeys(data))
from typing import Dict, Iterable, Optional, Union
from smtplib import SMTP
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from .rbasic import get_first_notnull
class REmail(object):
    """
    Rey's E-mail type.

    Stores default message parameters; the send/create methods fall back to
    these defaults for any argument not supplied explicitly.
    """

    def __init__(
        self,
        email_user: Optional[str] = None,
        email_password: Optional[str] = None,
        title: Optional[str] = None,
        text: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        to_email: Optional[Union[str, Iterable]] = None,
        cc_email: Optional[Union[str, Iterable]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable]] = None,
        display_cc_email: Optional[Union[str, Iterable]] = None
    ) -> None:
        """
        Set E-mail attribute.

        Parameters
        ----------
        email_user : Sender account (its domain also derives the SMTP host).
        email_password : Sender account password.
        title : Default mail subject.
        text : Default mail body text.
        attachment : Default attachments, file name mapped to path or bytes.
        to_email : Default recipient address or addresses.
        cc_email : Default carbon copy address or addresses.
        display_from_email : Sender address shown in the mail header.
        display_to_email : Recipient addresses shown in the mail header.
        display_cc_email : Carbon copy addresses shown in the mail header.
        """
        self.email_user = email_user
        self.email_password = email_password
        self.title = title
        self.text = text
        self.attachment = attachment
        self.to_email = to_email
        self.cc_email = cc_email
        self.display_from_email = display_from_email
        self.display_to_email = display_to_email
        self.display_cc_email = display_cc_email

    def create_email(
        self,
        title: Optional[str] = None,
        text: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable]] = None,
        display_cc_email: Optional[Union[str, Iterable]] = None
    ) -> str:
        """
        Create string in E-mail format (MIME multipart message).

        Parameters fall back to the instance defaults set in __init__.

        Returns
        -------
        The serialized MIME message string.
        """
        # Resolve each parameter by priority: argument, then instance default.
        title = get_first_notnull(title, self.title)
        text = get_first_notnull(text, self.text)
        attachment = get_first_notnull(attachment, self.attachment)
        display_from_email = get_first_notnull(display_from_email, self.display_from_email, self.email_user)
        display_to_email = get_first_notnull(display_to_email, self.display_to_email, self.to_email)
        display_cc_email = get_first_notnull(display_cc_email, self.display_cc_email, self.cc_email)
        mime = MIMEMultipart()
        if title != None:
            mime["subject"] = title
        if text != None:
            mime_text = MIMEText(text)
            mime.attach(mime_text)
        if attachment != None:
            for file_name, file_data in attachment.items():
                # A str value is treated as a file path and read as bytes.
                if type(file_data) == str:
                    with open(file_data, "rb") as f:
                        file_data = f.read()
                # NOTE(review): MIMEText expects str; passing bytes here may
                # raise or mis-encode — confirm attachment handling works.
                mime_file = MIMEText(file_data, _charset="utf-8")
                mime_file.add_header("content-disposition", "attachment", filename=file_name)
                mime.attach(mime_file)
        # Header display fields; iterables are joined with commas.
        if display_from_email != None:
            mime["from"] = display_from_email
        if display_to_email != None:
            if type(display_to_email) == str:
                mime["to"] = display_to_email
            else:
                mime["to"] = ",".join(display_to_email)
        if display_cc_email != None:
            if type(display_cc_email) == str:
                mime["cc"] = display_cc_email
            else:
                mime["cc"] = ",".join(display_cc_email)
        email_str = mime.as_string()
        return email_str

    def send_email(
        self,
        email_user: Optional[str] = None,
        email_password: Optional[str] = None,
        title: Optional[str] = None,
        text: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        to_email: Optional[Union[str, Iterable]] = None,
        cc_email: Optional[Union[str, Iterable]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable]] = None,
        display_cc_email: Optional[Union[str, Iterable]] = None
    ) -> None:
        """
        Send E-mail over SMTP.

        Parameters fall back to the instance defaults set in __init__;
        'email_user', 'email_password' and 'to_email' are required
        (get_first_notnull is called with default="error" for them).
        """
        # Resolve parameters by priority: argument, then instance default.
        email_user = get_first_notnull(email_user, self.email_user, default="error")
        email_password = get_first_notnull(email_password, self.email_password, default="error")
        title = get_first_notnull(title, self.title)
        text = get_first_notnull(text, self.text)
        attachment = get_first_notnull(attachment, self.attachment)
        to_email = get_first_notnull(to_email, self.to_email, default="error")
        cc_email = get_first_notnull(cc_email, self.cc_email)
        display_from_email = get_first_notnull(display_from_email, self.display_from_email, email_user)
        display_to_email = get_first_notnull(display_to_email, self.display_to_email, to_email)
        display_cc_email = get_first_notnull(display_cc_email, self.display_cc_email, cc_email)
        # The SMTP envelope recipient list is 'to' plus 'cc'.
        if type(to_email) == str:
            to_email = [to_email]
        if cc_email != None:
            if type(cc_email) == str:
                to_email.append(cc_email)
            else:
                to_email.extend(cc_email)
        email_str = self.create_email(title, text, attachment, display_from_email, display_to_email, display_cc_email)
        # Derive the SMTP host from the sender's domain; plain SMTP, port 25.
        # NOTE(review): no TLS is used — many providers require STARTTLS/465;
        # confirm this works against the intended mail server.
        server_domain_name = email_user.split("@")[-1]
        server_host = "smtp." + server_domain_name
        server_port = 25
        smtp = SMTP(server_host, server_port)
        smtp.login(email_user, email_password)
        smtp.sendmail(email_user, to_email, email_str)
        smtp.quit()
.. image:: https://img.shields.io/badge/License-MIT-yellow.svg
:target: https://opensource.org/licenses/MIT
.. image:: https://img.shields.io/badge/python-3.7-blue.svg
:target: https://www.python.org/downloads/release/python-370/
.. image:: https://img.shields.io/pypi/v/reynir-correct
:target: https://pypi.org/project/reynir-correct/
.. image:: https://shields.io/github/v/release/mideind/GreynirCorrect?display_name=tag
:target: https://github.com/mideind/GreynirCorrect/releases
.. image:: https://github.com/mideind/GreynirCorrect/actions/workflows/python-package.yml/badge.svg
:target: https://github.com/mideind/GreynirCorrect/actions?query=workflow%3A%22Python+package%22
==============================================================
GreynirCorrect: Spelling and grammar correction for Icelandic
==============================================================
********
Overview
********
**GreynirCorrect** is a Python 3 (>= 3.7) package and command line tool for
**checking and correcting spelling and grammar** in Icelandic text.
GreynirCorrect relies on the `Greynir <https://pypi.org/project/reynir/>`__ package,
by the same authors, to tokenize and parse text.
GreynirCorrect is documented in detail `here <https://yfirlestur.is/doc/>`__.
The software has three main modes of operation, described below.
As a fourth alternative, you can call the JSON REST API
of `Yfirlestur.is <https://yfirlestur.is>`__
to apply the GreynirCorrect spelling and grammar engine to your text,
as `documented here <https://github.com/mideind/Yfirlestur#https-api>`__.
Token-level correction
----------------------
GreynirCorrect can tokenize text and return an automatically corrected token stream.
This catches token-level errors, such as spelling errors and erroneous
phrases, but not grammatical errors. Token-level correction is relatively fast.
Full grammar analysis
---------------------
GreynirCorrect can analyze text grammatically by attempting to parse
it, after token-level correction. The parsing is done according to Greynir's
context-free grammar for Icelandic, augmented with additional production
rules for common grammatical errors. The analysis returns a set of annotations
(errors and suggestions) that apply to spans (consecutive tokens) within
sentences in the resulting token list. Full grammar analysis is considerably
slower than token-level correction.
Command-line tool
-----------------
GreynirCorrect can be invoked as a command-line tool
to perform token-level correction and, optionally, grammar analysis.
The command is ``correct infile.txt outfile.txt``.
The command-line tool is further documented below.
********
Examples
********
To perform token-level correction from Python code:
.. code-block:: python
>>> from reynir_correct import tokenize
>>> g = tokenize("Af gefnu tilefni fékk fékk daninn vilja sýnum "
>>> "framgengt í auknu mæli.")
>>> for tok in g:
>>> print("{0:10} {1}".format(tok.txt or "", tok.error_description))
Output::
Að Orðasambandið 'Af gefnu tilefni' var leiðrétt í 'að gefnu tilefni'
gefnu
tilefni
fékk Endurtekið orð ('fékk') var fellt burt
Daninn Orð á að byrja á hástaf: 'daninn'
vilja Orðasambandið 'vilja sýnum framgengt' var leiðrétt í 'vilja sínum framgengt'
sínum
framgengt
í Orðasambandið 'í auknu mæli' var leiðrétt í 'í auknum mæli'
auknum
mæli
.
To perform full spelling and grammar analysis of a sentence from Python code:
.. code-block:: python
from reynir_correct import check_single
sent = check_single("Páli, vini mínum, langaði að horfa á sjónnvarpið.")
for annotation in sent.annotations:
print("{0}".format(annotation))
Output::
000-004: P_WRONG_CASE_þgf_þf Á líklega að vera 'Pál, vin minn' / [Pál , vin minn]
009-009: S004 Orðið 'sjónnvarpið' var leiðrétt í 'sjónvarpið'
.. code-block:: python
sent.tidy_text
Output::
'Páli, vini mínum, langaði að horfa á sjónvarpið.'
The ``annotation.start`` and ``annotation.end`` properties
(here ``start`` is 0 and ``end`` is 4) contain the 0-based indices of the first
and last tokens to which the annotation applies.
The ``annotation.start_char`` and ``annotation.end_char`` properties
contain the indices of the first and last character to which the
annotation applies, within the original input string.
``P_WRONG_CASE_þgf_þf`` and ``S004`` are error codes.
For more detailed, low-level control, the ``check_errors()`` function
supports options and can produce various types of output:
.. code-block:: python
from reynir_correct import check_errors
x = "Páli, vini mínum, langaði að horfa á sjónnvarpið."
options = { "input": x, "annotations": True, "format": "text" }
s = check_errors(**options)
for i in s.split("\n"):
print(i)
Output::
Pál, vin minn, langaði að horfa á sjónvarpið.
000-004: P_WRONG_CASE_þgf_þf Á líklega að vera 'Pál, vin minn' | 'Páli, vini mínum,' -> 'Pál, vin minn' | None
009-009: S004 Orðið 'sjónnvarpið' var leiðrétt í 'sjónvarpið' | 'sjónnvarpið' -> 'sjónvarpið' | None
The following options can be specified:
+-----------------------------------+--------------------------------------------------+-----------------+
| | Option | Description | Default value |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``input`` | Defines the input. Can be a string or an | ``sys.stdin`` |
| | iterable of strings, such as a file object. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``all_errors`` | Defines the level of correction. | ``True`` |
| | (alias ``grammar``) | If False, only token-level annotation is | |
| | carried out. If True, sentence-level | |
| | annotation is carried out. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``annotate_unparsed_sentences`` | If True, sentences that cannot be parsed | ``True`` |
| | are annotated in their entirety as errors. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``generate_suggestion_list`` | If True, annotations can in certain | ``False`` |
| | cases contain a list of possible corrections, | |
| | for the user to pick from. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``suppress_suggestions`` | If True, more farfetched automatically | ``False`` |
| | suggested corrections are suppressed. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``ignore_wordlist`` | The value is a set of strings to whitelist. | ``set()`` |
| | Each string is a word that should not be | |
| | marked as an error or corrected. The comparison | |
| | is case-sensitive. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``one_sent`` | The input contains a single sentence only. | ``False`` |
| | Sentence splitting should not be attempted. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``ignore_rules`` | A set of error codes that should be ignored | ``set()`` |
| | in the annotation process. | |
+-----------------------------------+--------------------------------------------------+-----------------+
| | ``tov_config`` | Path to an additional configuration file that | ``False`` |
| | may be provided for correcting custom | |
| | tone-of-voice issues. | |
+-----------------------------------+--------------------------------------------------+-----------------+
An overview of error codes is available `here <https://github.com/mideind/GreynirCorrect/blob/master/doc/errorcodes.rst>`__.
*************
Prerequisites
*************
GreynirCorrect runs on CPython 3.7 or newer, and on PyPy 3.7 or newer. It has
been tested on Linux, macOS and Windows. The
`PyPi package <https://pypi.org/project/reynir-correct/>`_
includes binary wheels for common environments, but if the setup on your OS
requires compilation from sources, you may need
.. code-block:: bash
$ sudo apt-get install python3-dev
...or a similar command for your environment, to enable compiling the package from source.
************
Installation
************
To install this package (assuming you have Python >= 3.7 with ``pip`` installed):
.. code-block:: bash
$ pip install reynir-correct
If you want to be able to edit the source, do like so
(assuming you have ``git`` installed):
.. code-block:: bash
$ git clone https://github.com/mideind/GreynirCorrect
$ cd GreynirCorrect
$ # [ Activate your virtualenv here if you have one ]
$ pip install -e .
The package source code is now in ``GreynirCorrect/src/reynir_correct``.
*********************
The command line tool
*********************
After installation, the corrector can be invoked directly from the command line:
.. code-block:: bash
$ correct input.txt output.txt
...or:
.. code-block:: bash
$ echo "Þinngið samþikkti tilöguna" | correct
Þingið samþykkti tillöguna
Input and output files are encoded in UTF-8. If the files are not
given explicitly, ``stdin`` and ``stdout`` are used for input and output,
respectively.
Empty lines in the input are treated as sentence boundaries.
By default, the output consists of one sentence per line, where each
line ends with a single newline character (ASCII LF, ``chr(10)``, ``"\n"``).
Within each line, tokens are separated by spaces.
The following (mutually exclusive) options can be specified
on the command line:
+-------------------+---------------------------------------------------+
| | ``--csv`` | Output token objects in CSV |
| | format, one per line. Sentences are separated by |
| | lines containing ``0,"",""`` |
+-------------------+---------------------------------------------------+
| | ``--json`` | Output token objects in JSON format, one per line.|
+-------------------+---------------------------------------------------+
| | ``--normalize`` | Normalize punctuation, causing e.g. quotes to be |
| | output in Icelandic form and hyphens to be |
| | regularized. |
+-------------------+---------------------------------------------------+
| | ``--grammar`` | Output whole-sentence annotations, including |
| | corrections and suggestions for spelling and |
| | grammar. Each sentence in the input is output as |
| | a text line containing a JSON object, terminated |
| | by a newline. |
+-------------------+---------------------------------------------------+
The CSV and JSON formats of token objects are identical to those documented
for the `Tokenizer package <https://github.com/mideind/Tokenizer>`__.
The JSON format of whole-sentence annotations is identical to the one documented for
the `Yfirlestur.is HTTPS REST API <https://github.com/mideind/Yfirlestur#https-api>`__.
Type ``correct -h`` to get a short help message.
Command Line Examples
---------------------
.. code-block:: bash
$ echo "Atvinuleysi jógst um 3%" | correct
Atvinnuleysi jókst um 3%
.. code-block:: bash
$ echo "Barnið vil grænann lit" | correct --csv
6,"Barnið",""
6,"vil",""
6,"grænan",""
6,"lit",""
0,"",""
Note how *vil* is not corrected, as it is a valid and common word, and
the ``correct`` command does not perform grammar checking by default.
.. code-block:: bash
$ echo "Pakkin er fyrir hestin" | correct --json
{"k":"BEGIN SENT"}
{"k":"WORD","t":"Pakkinn"}
{"k":"WORD","t":"er"}
{"k":"WORD","t":"fyrir"}
{"k":"WORD","t":"hestinn"}
{"k":"END SENT"}
To perform whole-sentence grammar checking and annotation as well as spell checking,
use the ``--grammar`` option:
.. code-block:: bash
$ echo "Ég kláraði verkefnið þrátt fyrir að ég var þreittur." | correct --grammar
{
"original":"Ég kláraði verkefnið þrátt fyrir að ég var þreittur.",
"corrected":"Ég kláraði verkefnið þrátt fyrir að ég var þreyttur.",
"tokens":[
{"k":6,"x":"Ég","o":"Ég"},
{"k":6,"x":"kláraði","o":" kláraði"},
{"k":6,"x":"verkefnið","o":" verkefnið"},
{"k":6,"x":"þrátt fyrir","o":" þrátt fyrir"},
{"k":6,"x":"að","o":" að"},
{"k":6,"x":"ég","o":" ég"},
{"k":6,"x":"var","o":" var"},
{"k":6,"x":"þreyttur","o":" þreittur"},
{"k":1,"x":".","o":"."}
],
"annotations":[
{
"start":6,
"end":6,
"start_char":35,
"end_char":37,
"code":"P_MOOD_ACK",
"text":"Hér er réttara að nota viðtengingarhátt
sagnarinnar 'vera', þ.e. 'væri'.",
"detail":"Í viðurkenningarsetningum á borð við 'Z'
í dæminu 'X gerði Y þrátt fyrir að Z' á sögnin að vera
í viðtengingarhætti fremur en framsöguhætti.",
"suggest":"væri"
},
{
"start":7,
"end":7,
"start_char":38,
"end_char":41,
"code":"S004",
"text":"Orðið 'þreittur' var leiðrétt í 'þreyttur'",
"detail":"",
"suggest":"þreyttur"
}
]
}
The output has been formatted for legibility - each input sentence is actually
represented by a JSON object in a single line of text, terminated by newline.
Note that the ``corrected`` field only includes token-level spelling correction
(in this case *þreittur* ``->`` *þreyttur*), but no grammar corrections.
The grammar corrections are found in the ``annotations`` list.
To apply corrections and suggestions from the annotations,
replace source text or tokens (as identified by the ``start`` and ``end``,
or ``start_char`` and ``end_char`` properties) with the ``suggest`` field, if present.
*****
Tests
*****
To run the built-in tests, install `pytest <https://docs.pytest.org/en/latest/>`_,
``cd`` to your ``GreynirCorrect`` subdirectory (and optionally activate your
virtualenv), then run:
.. code-block:: bash
$ python -m pytest
****************
Acknowledgements
****************
Parts of this software are developed under the auspices of the
Icelandic Government's 5-year Language Technology Programme for Icelandic,
which is managed by Almannarómur and described
`here <https://www.stjornarradid.is/lisalib/getfile.aspx?itemid=56f6368e-54f0-11e7-941a-005056bc530c>`__
(English version `here <https://clarin.is/media/uploads/mlt-en.pdf>`__).
*********************
Copyright and License
*********************
.. image:: https://github.com/mideind/GreynirPackage/raw/master/doc/_static/MideindLogoVert100.png?raw=true
:target: https://mideind.is
:align: right
:alt: Miðeind ehf.
**Copyright © 2022 Miðeind ehf.**
GreynirCorrect's original author is *Vilhjálmur Þorsteinsson*.
This software is licensed under the *MIT License*:
*Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:*
*The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.*
*THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.*
----
GreynirCorrect indirectly embeds the `Database of Icelandic Morphology <https://bin.arnastofnun.is>`_
(`Beygingarlýsing íslensks nútímamáls <https://bin.arnastofnun.is>`_), abbreviated BÍN,
along with directly using
`Ritmyndir <https://bin.arnastofnun.is/DMII/LTdata/comp-format/nonstand-form/>`_,
a collection of non-standard word forms.
Miðeind does not claim any endorsement by the BÍN authors or copyright holders.
The BÍN source data are publicly available under the
`CC BY-SA 4.0 license <https://creativecommons.org/licenses/by-sa/4.0/>`_, as further
detailed `here in English <https://bin.arnastofnun.is/DMII/LTdata/conditions/>`_
and `here in Icelandic <https://bin.arnastofnun.is/gogn/mimisbrunnur/>`_.
In accordance with the BÍN license terms, credit is hereby given as follows:
*Beygingarlýsing íslensks nútímamáls. Stofnun Árna Magnússonar í íslenskum fræðum.*
*Höfundur og ritstjóri Kristín Bjarnadóttir.*
| /reynir-correct-3.4.7.tar.gz/reynir-correct-3.4.7/README.rst | 0.886985 | 0.688233 | README.rst | pypi |
from typing import List, Union, overload

try:
    from transformers import pipeline  # type: ignore
except ImportError as e:
    # transformers (and its backends) are optional; replace the bare
    # ImportError with an actionable message telling the user how to
    # install the 'sentence_classifier' extra.
    import warnings

    warningtext = (
        "Tried to import the classifier module without the required packages installed.\n"
        "The required packages are in the 'sentence_classifier' extra\n"
        "Run 'pip install reynir-correct[sentence_classifier]' to install them\n"
        "\n"
        "Alternatively, install the packages directly with\n"
        "'pip install datasets transformers torch' or similar.\n"
    )
    warnings.warn(warningtext)
    # Chain the original exception so the real cause stays visible
    raise ImportError(warningtext) from e
class SentenceClassifier:

    """Binary sentence classifier: flags sentences that probably contain
    a spelling or grammar error, using a fine-tuned byT5 model."""

    _model_name = "mideind/yfirlestur-icelandic-classification-byt5"
    # The model generates this label for sentences it considers erroneous
    _true_label = "1"
    # Task prefix that the fine-tuned model expects on every input
    _domain_prefix = "has_error "

    def __init__(self) -> None:
        """Load the classification model into a text2text-generation pipeline.
        Note: this downloads/loads the model and can be slow."""
        self.pipe = pipeline(
            "text2text-generation", model=self._model_name, tokenizer="google/byt5-base"
        )

    @overload
    def classify(self, text: str) -> bool:
        ...

    @overload
    def classify(self, text: List[str]) -> List[bool]:
        ...

    def classify(self, text: Union[str, List[str]]) -> Union[List[bool], bool]:
        """Classify a sentence or sentences.
        For each sentence, return True if the sentence probably contains an error.

        A bare string argument returns a bare bool; a list argument always
        returns a list, even for a single-element list. (The previous
        implementation returned a bare bool for any length-1 result,
        violating the declared overloads.)
        """
        single = isinstance(text, str)
        if single:
            text = [text]
        pipe_result = self.pipe([self._domain_prefix + t for t in text])
        result: List[bool] = [
            r["generated_text"] == self._true_label for r in pipe_result
        ]
        return result[0] if single else result
def _main() -> None:
    """Small demonstration: classify a batch of sentences and a single
    sentence, printing each sentence and its verdict."""
    classifier = SentenceClassifier()
    batch = [
        "Þesi settníng er ekki rét sfsett.",
        "Þessi setning er rétt stafsett.",
    ]
    batch_results = classifier.classify(batch)
    single_sent = "Þesi er vavasöm."
    single_result = classifier.classify(single_sent)
    for sent, verdict in zip(batch, batch_results):
        print(f"Sentence: {sent}")
        print(f"Result: {verdict}")
    print(f"Sentence: {single_sent}")
    print(f"Result: {single_result}")


if __name__ == "__main__":
    _main()
from typing import Optional, List
class Annotation:

    """An annotation of a span of a token list for a sentence.

    Describes an error, warning or suggestion that applies to the
    inclusive token span [start, end] within a sentence."""

    def __init__(
        self,
        *,
        start: int,
        end: int,
        code: str,
        text: str,
        detail: Optional[str] = None,
        references: Optional[List[str]] = None,
        original: Optional[str] = None,
        suggest: Optional[str] = None,
        suggestlist: Optional[List[str]] = None,
        is_warning: bool = False,
    ) -> None:
        """Initialize an annotation.

        Args:
            start: Index of the first token the annotation applies to.
            end: Index of the last token (inclusive).
            code: Error or warning code; a "/w" suffix marks a warning.
            text: Short human-readable description of the error.
            detail: Longer description, possibly with <a>...</a> links.
            references: References to the Icelandic Standards, if any.
            original: The original text being annotated, if available.
            suggest: Suggested correction for the token span, if available.
            suggestlist: List of possible corrections, if available.
            is_warning: If True, mark this annotation as a warning.
        """
        assert isinstance(start, int)
        assert isinstance(end, int)
        self._start = start
        self._end = end
        # Warnings are encoded with a "/w" suffix on the code
        if is_warning and not code.endswith("/w"):
            code += "/w"
        self._code = code
        # text is a short, straight-to-the-point human-readable description
        # of the error
        self._text = text
        # detail is a more detailed human-readable description of the error,
        # containing further explanations, eventually using grammatical terms,
        # and possibly links to further reference material (within <a>...</a> tags)
        self._detail = detail
        # If suggest is given, it is a suggested correction,
        # i.e. text that would replace the start..end token span.
        # The correction is in the form of token text joined by
        # " " spaces, so correct_spaces() should be applied to
        # it before displaying it.
        self._suggest = suggest
        self._original = original
        self._suggestlist = suggestlist
        # Allocate a fresh list per instance: the previous signature used a
        # mutable default argument ([]), so all annotations constructed
        # without an explicit references list shared (and could mutate)
        # one and the same list object.
        self._references = [] if references is None else references

    def __str__(self) -> str:
        """Return a string representation of this annotation"""
        if self._original and self._suggest:
            orig_sugg = f" | '{self._original}' -> '{self._suggest}'"
        else:
            orig_sugg = ""
        return "{0:03}-{1:03}: {2:6} {3}{4} | {5}".format(
            self._start,
            self._end,
            self._code,
            self._text,
            orig_sugg,
            self._suggestlist,
        )

    @property
    def start(self) -> int:
        """The index of the first token to which the annotation applies"""
        return self._start

    @property
    def end(self) -> int:
        """The index of the last token to which the annotation applies"""
        return self._end

    @property
    def code(self) -> str:
        """A code for the annotation type, usually an error or warning code"""
        # If the code ends with "/w", it is a warning
        return self._code

    @property
    def is_warning(self) -> bool:
        """Return True if this annotation is a warning only"""
        return self._code.endswith("/w")

    @property
    def is_error(self) -> bool:
        """Return True if this annotation is an error"""
        return not self._code.endswith("/w")

    @property
    def text(self) -> str:
        """A description of the annotation"""
        return self._text

    @property
    def detail(self) -> Optional[str]:
        """A detailed description of the annotation, possibly including
        links within <a>...</a> tags"""
        return self._detail

    @property
    def original(self) -> Optional[str]:
        """The original text for the error"""
        return self._original

    @property
    def suggest(self) -> Optional[str]:
        """A suggested correction for the token span, as a text string
        containing tokens delimited by spaces"""
        return self._suggest

    @property
    def suggestlist(self) -> Optional[List[str]]:
        """A list of suggested corrections for the token span, as a list
        of text strings containing tokens delimited by spaces"""
        return self._suggestlist

    @property
    def references(self) -> List[str]:
        """A list of references to the Icelandic Standards"""
        return self._references
from typing import (
List,
Optional,
Sequence,
Tuple,
Iterator,
Iterable,
Dict,
Any,
Union,
cast,
TYPE_CHECKING,
)
if TYPE_CHECKING:
from .classifier import SentenceClassifier
import sys
import argparse
import json
from functools import partial
from typing_extensions import TypedDict
from tokenizer import detokenize, text_from_tokens, normalized_text_from_tokens, TOK
from tokenizer.definitions import AmountTuple, NumberTuple
from .errtokenizer import CorrectToken, Error
from .errtokenizer import tokenize as errtokenize
from .annotation import Annotation
from .checker import GreynirCorrect, check_tokens, load_config
from .settings import Settings
class AnnTokenDict(TypedDict, total=False):

    """Type of the token dictionaries returned from check_errors().
    total=False: any of these keys may be absent from a given dict."""

    # Token kind
    k: int
    # Token text
    x: str
    # Original text of token
    o: str
    # Character offset of token, indexed from the start of the checked text
    i: int
class AnnDict(TypedDict):

    """A single annotation, as returned by the Yfirlestur.is API"""

    # Index of the first token to which the annotation applies
    start: int
    # Index of the last token (inclusive)
    end: int
    # Character offset of the first character of the annotated span,
    # within the original input text
    start_char: int
    # Character offset of the last character of the span (inclusive)
    end_char: int
    # Error or warning code
    code: str
    # Short human-readable description
    text: str
    # Longer description, if any
    detail: Optional[str]
    # Suggested replacement text, if any
    suggest: Optional[str]
class AnnResultDict(TypedDict):

    """The annotation result for a sentence"""

    # The original sentence text
    original: str
    # The sentence text with corrections applied
    corrected: str
    # The annotations that apply to the sentence
    annotations: List[AnnDict]
    # The tokens of the sentence
    tokens: List[AnnTokenDict]
# Accumulator type: a mix of whole sentences (token lists) and single tokens
TokenSumType = List[Union[List[CorrectToken], CorrectToken]]

# File types for UTF-8 encoded text files
ReadFile = argparse.FileType("r", encoding="utf-8")
WriteFile = argparse.FileType("w", encoding="utf-8")

# Configure our JSON dump function: keep non-ASCII characters as-is
# and emit the most compact separators
json_dumps = partial(json.dumps, ensure_ascii=False, separators=(",", ":"))
# Utility functions for input handling and output formatting
def gen(f: Iterator[str]) -> Iterable[str]:
    """Yield each line of the input file/iterator in turn"""
    for line in f:
        yield line
def quote(s: str) -> str:
    """Return s wrapped in double quotes, escaping any contained
    backslashes and double quotes with a backslash"""
    if not s:
        # Empty string: just a pair of quotes
        return '""'
    escaped = s.replace("\\", "\\\\").replace('"', '\\"')
    return f'"{escaped}"'
def val(
    t: CorrectToken, quote_word: bool = False
) -> Union[None, str, float, Tuple[Any, ...], Sequence[Any]]:
    """Return the value part of the token t, in a form suitable for
    CSV/JSON output; None when the token carries no useful value.
    If quote_word is True, string results are returned quoted/escaped."""
    value = t.val
    if value is None:
        return None
    kind = t.kind
    if kind in {TOK.WORD, TOK.PERSON, TOK.ENTITY}:
        # No need to return list of meanings
        return None
    if kind in {TOK.PERCENT, TOK.NUMBER, TOK.CURRENCY}:
        # The numeric value is the first element of the value tuple
        return cast(NumberTuple, value)[0]
    if kind == TOK.AMOUNT:
        num, iso, _, _ = cast(AmountTuple, value)
        # Quoted form: "1234.56|USD"; otherwise a (number, currency) tuple
        return '"{0}|{1}"'.format(num, iso) if quote_word else (num, iso)
    if kind == TOK.S_BEGIN:
        return None
    if kind == TOK.PUNCTUATION:
        punct = t.punctuation
        return quote(punct) if quote_word else punct
    if quote_word and kind in {
        TOK.DATE,
        TOK.TIME,
        TOK.DATEABS,
        TOK.DATEREL,
        TOK.TIMESTAMP,
        TOK.TIMESTAMPABS,
        TOK.TIMESTAMPREL,
        TOK.TELNO,
        TOK.NUMWLETTER,
        TOK.MEASUREMENT,
    }:
        # Return a quoted, |-delimited list of the value elements
        return quote("|".join(str(v) for v in cast(Iterable[Any], value)))
    if quote_word and isinstance(value, str):
        return quote(value)
    return value
def check_errors(**options: Any) -> str:
    """Run the spelling and grammar checker on the input and return the
    result as a string in the chosen format and correction level"""
    corrector = GreynirCorrect(load_config(options), **options)
    # Wrap a bare string input in a list so that downstream code can
    # treat the input uniformly as an iterable of strings
    source = options.get("input", None)
    if isinstance(source, str):
        options["input"] = [source]
    if not options.get("all_errors", True):
        # Token-level (spelling) correction only
        return check_spelling(settings=corrector.settings, **options)
    # Full sentence-level grammar analysis
    return check_grammar(rc=corrector)
def check_spelling(settings: Settings, **options: Any) -> str:
    """Perform token-level correction of the input and return the result
    as a single string in the requested format ("text", "csv" or "json")."""
    # Select the function used to convert a token list to output text
    format = options.get("format", "json")
    if options.get("spaced", False):
        if options.get("normalize", False):
            to_text = normalized_text_from_tokens
        else:
            to_text = text_from_tokens
    else:
        to_text = partial(detokenize, normalize=True)
    toks = sentence_stream(settings=settings, **options)
    # unisum collects finished output lines; allsum collects per-token
    # lines (csv/json); annlist collects annotation strings
    unisum: List[str] = []
    allsum: List[str] = []
    annlist: List[str] = []
    annotations = options.get("annotations", False)
    print_all = options.get("print_all", False)
    for toklist in toks:
        if format == "text":
            txt = to_text(toklist)
            if annotations:
                # Collect the token-level error annotations of this sentence
                for t in toklist:
                    if t.error:
                        annlist.append(str(t.error))
                if annlist and not print_all:
                    # Emit the annotations directly below the sentence
                    txt = txt + "\n" + "\n".join(annlist)
                    annlist = []
            unisum.append(txt)
            continue
        for t in toklist:
            if format == "csv":
                if t.txt:
                    # One CSV line per token: kind, text, value, error
                    allsum.append(
                        "{0},{1},{2},{3}".format(
                            t.kind,
                            quote(t.txt),
                            val(t, quote_word=True) or '""',
                            quote(str(t.error) if t.error else ""),
                        )
                    )
                elif t.kind == TOK.S_END:
                    # Indicate end of sentence
                    allsum.append('0,"",""')
            elif format == "json":
                # Output the tokens in JSON format, one line per token
                d: Dict[str, Any] = dict(k=TOK.descr[t.kind])
                if t.txt is not None:
                    d["t"] = t.txt
                v = val(t)
                if t.kind not in {TOK.WORD, TOK.PERSON, TOK.ENTITY} and v is not None:
                    d["v"] = v
                if isinstance(t.error, Error):
                    d["e"] = t.error.to_dict()
                allsum.append(json_dumps(d))
        if allsum:
            # Flush the per-token lines gathered for this sentence
            unisum.extend(allsum)
            allsum = []
    if print_all:
        # We want the annotations at the bottom
        unistr = " ".join(unisum)
        if annlist:
            unistr = unistr + "\n" + "\n".join(annlist)
    else:
        unistr = "\n".join(unisum)
    return unistr
def test_spelling(settings: Settings, **options: Any) -> Tuple[str, TokenSumType]:
    """Token-level correction variant used for testing: returns both the
    corrected output text and the accumulated token lists."""
    # Select the function used to convert a token list to output text
    if options.get("spaced", False):
        if options.get("normalize", False):
            to_text = normalized_text_from_tokens
        else:
            to_text = text_from_tokens
    else:
        to_text = partial(detokenize, normalize=True)
    toks = sentence_stream(settings=settings, **options)
    unisum: List[str] = []
    toksum: TokenSumType = []
    annlist: List[str] = []  # NOTE(review): never populated in this function
    print_all = options.get("print_all", False)
    for toklist in toks:
        unisum.append(to_text(toklist))
        if print_all:
            # Flat accumulation of individual tokens
            toksum.extend(toklist)
        else:
            # One token list per sentence
            toksum.append(toklist)
        continue
    if print_all:
        # We want the annotations at the bottom
        unistr = " ".join(unisum)
        if annlist:
            # NOTE(review): dead branch — annlist is never filled above
            unistr = unistr + "\n" + "\n".join(annlist)
    else:
        unistr = "\n".join(unisum)
    return unistr, toksum
def sentence_stream(settings: Settings, **options: Any) -> Iterator[List[CorrectToken]]:
    """Yield a stream of sentence token lists from the source text"""
    source = options.get("input", None)
    if source is None:
        # Default to reading from standard input
        source = sys.stdin
    # Accumulate tokens until an end-of-sentence/paragraph marker is seen
    sent: List[CorrectToken] = []
    for tok in errtokenize(source, settings, **options):
        sent.append(tok)
        if tok.kind in TOK.END:
            # End of sentence/paragraph: emit the accumulated tokens
            yield sent
            sent = []
    if sent:
        # Emit any trailing tokens not followed by an end marker
        yield sent
def test_grammar(rc: GreynirCorrect, **options: Any) -> Tuple[str, TokenSumType]:
    """Do a full spelling and grammar check of the source text"""
    accumul: List[str] = []
    offset = 0
    alltoks: TokenSumType = []
    # Pass only the options relevant to the sentence-level grammar check
    inneroptions: Dict[str, Union[str, bool]] = {}
    inneroptions["annotate_unparsed_sentences"] = options.get(
        "annotate_unparsed_sentences", True
    )
    inneroptions["ignore_rules"] = options.get("ignore_rules", set())
    annlist: List[str] = []
    for toklist in sentence_stream(rc.settings, **options):
        # Invoke the spelling and grammar checker on the token list
        # Only contains options relevant to the grammar check
        sent = check_tokens(toklist, rc, **inneroptions)
        if sent is None:
            # Should not happen?
            continue
        # Maintain token character offsets, accumulated over the entire source text
        token_offsets: Dict[int, int] = dict()
        for ix, t in enumerate(toklist):
            token_offsets[ix] = offset
            offset += len(t.original or t.txt or "")
        # Extract the annotation list (defensive programming here)
        a: List[Annotation] = getattr(sent, "annotations", cast(List[Annotation], []))
        # Sort in ascending order by token start index, and then by end index
        # (more narrow/specific annotations before broader ones)
        a.sort(key=lambda ann: (ann.start, ann.end))
        # Apply corrections from the rear so that earlier indices stay valid
        arev = sorted(a, key=lambda ann: (ann.start, ann.end), reverse=True)
        cleantoklist: List[CorrectToken] = toklist[:]
        alltoks.extend(cleantoklist)
        for xann in arev:
            if xann.suggest is None:
                # Nothing to correct with, nothing we can do
                continue
            # The +1 offset appears to compensate for a sentence-begin
            # token at the head of the raw token list — TODO confirm
            cleantoklist[xann.start + 1].txt = xann.suggest
            if xann.end > xann.start:
                # Annotation spans many tokens
                # "Okkur börnunum langar í fisk"
                # "Leita að kílómeter af féinu" → leita að kílómetri af fénu → leita að kílómetra af fénu
                # "dást af þeim" → "dást að þeim"
                # Single-token annotations for this span have already been handled
                # Only case is one ann, many toks in toklist
                # Give the first token the correct value
                # Delete the other tokens
                del cleantoklist[xann.start + 2 : xann.end + 2]
        txt = detokenize(cleantoklist, normalize=True)
        if options.get("annotations", False):
            for aann in a:
                annlist.append(str(aann))
            if annlist and not options.get("print_all", False):
                txt = txt + "\n" + "\n".join(annlist)
                annlist = []
        accumul.append(txt)
    accumstr = "\n".join(accumul)
    return accumstr, alltoks
def check_grammar(rc: GreynirCorrect) -> str:
    """Do a full spelling and grammar check of the source text,
    returning the formatted result as a single string"""
    # Pass only the options relevant to the sentence-level grammar check
    inneroptions: Dict[str, Union[str, bool]] = {}
    inneroptions["annotate_unparsed_sentences"] = rc._options.get(
        "annotate_unparsed_sentences", True
    )
    inneroptions["ignore_rules"] = rc._options.get("ignore_rules", set())
    sentence_results: List[Dict[str, Any]] = []
    sentence_classifier: Optional[SentenceClassifier] = None
    for raw_tokens in sentence_stream(settings=rc.settings, **rc._options):
        # Reconstruct the original sentence text from the raw tokens
        original_sentence = "".join([t.original or t.txt for t in raw_tokens])
        if rc._options.get("sentence_prefilter", False):
            # Only construct the classifier model if we need it
            from .classifier import SentenceClassifier

            if sentence_classifier is None:
                sentence_classifier = SentenceClassifier()
            if not sentence_classifier.classify(original_sentence):
                # Skip the full parse if we think the sentence is probably correct
                # Remove the metatokens (e.g. sentence-begin) from the token stream
                # to be consistent with what the parser returns.
                # TODO: use TOK.META_BEGIN when that PR is ready
                tokens_no_meta = [t for t in raw_tokens if t.kind < TOK.S_SPLIT]
                nice_tokens = [
                    AnnTokenDict(k=d.kind, x=d.txt, o=d.original or d.txt)
                    for d in tokens_no_meta
                ]
                # Report the sentence unchanged, with no annotations
                sentence_results.append(
                    {
                        "original": original_sentence,
                        "corrected": original_sentence,
                        "partially_corrected": original_sentence,
                        "annotations": [],
                        "tokens": nice_tokens,
                    }
                )
                continue
        annotated_sentence = check_tokens(raw_tokens, rc, **inneroptions)
        if annotated_sentence is None:
            # This should not happen, but we check to be sure, and to satisfy mypy
            # TODO: Should we rather raise an exception instead of silently discarding the sentence?
            continue
        # Extract the annotation list (defensive programming here)
        annotations: List[Annotation] = getattr(
            annotated_sentence, "annotations", cast(List[Annotation], [])
        )
        # Sort in ascending order by token start index, and then by end index
        # (more narrow/specific annotations before broader ones)
        annotations.sort(key=lambda ann: (ann.start, ann.end))
        # Generate a sentence with only spelling corrections applied
        partially_corrected_sentence = detokenize(annotated_sentence.tokens)
        # Generate a sentence with all corrections applied
        full_correction_toks = annotated_sentence.tokens[:]
        # Apply annotations from the rear so that earlier indices stay valid
        for ann in annotations[::-1]:
            if ann.suggest is None:
                # Nothing to correct with, nothing we can do
                continue
            full_correction_toks[ann.start].txt = ann.suggest
            if ann.end > ann.start:
                # Annotation spans many tokens
                # "Okkur börnunum langar í fisk"
                # "Leita að kílómeter af féinu" → leita að kílómetri af fénu → leita að kílómetra af fénu
                # "dást af þeim" → "dást að þeim"
                # Single-token annotations for this span have already been handled
                # Only case is one ann, many toks in toklist
                # Give the first token the correct value
                # Delete the other tokens
                del full_correction_toks[ann.start + 1 : ann.end + 1]
        fully_corrected_sentence = detokenize(full_correction_toks)
        # Make a nice token list
        tokens: List[AnnTokenDict]
        if annotated_sentence.tree is None:
            # Not parsed: use the raw token list
            tokens = [
                AnnTokenDict(k=d.kind, x=d.txt, o=d.original or d.txt)
                for d in annotated_sentence.tokens
            ]
        else:
            # Successfully parsed: use the text from the terminals (where available)
            # since we have more info there, for instance on em/en dashes.
            # Create a map of token indices to corresponding terminal text
            assert annotated_sentence.terminals is not None
            token_map = {t.index: t.text for t in annotated_sentence.terminals}
            tokens = [
                AnnTokenDict(
                    k=d.kind, x=token_map.get(ix, d.txt), o=d.original or d.txt
                )
                for ix, d in enumerate(annotated_sentence.tokens)
            ]
        sentence_results.append(
            {
                "original": original_sentence,
                "partially_corrected": partially_corrected_sentence,
                "corrected": fully_corrected_sentence,
                "annotations": annotations,
                "tokens": tokens,
            }
        )
    # Options that only apply to the "text" output format
    extra_text_options = {
        "annotations": rc._options.get("annotations", False),
        "print_all": rc._options.get("print_all", False),
    }
    return format_output(
        sentence_results, rc._options.get("format", "json"), extra_text_options
    )
def format_output(
    sentence_results: List[Dict[str, Any]],
    format_type: str,
    extra_text_options: Dict[str, Any],
) -> str:
    """
    Format grammar analysis results in the given format.
    `sentence_results` is a list of individual sentences and their analysis
    `format_type` is the output format to use, one of 'text', 'json', 'csv', 'm2'
    `extra_text_options` takes extra options for the text format. Ignored for other formats.
    """
    if format_type == "text":
        # Only the text formatter consumes the extra options
        return format_text(sentence_results, extra_text_options)
    dispatch = {
        "json": format_json,
        "csv": format_csv,
        "m2": format_m2,
    }
    formatter = dispatch.get(format_type)
    if formatter is None:
        raise Exception(f"Tried to format with invalid format: {format_type}")
    return formatter(sentence_results)
def format_text(
    sentence_results: List[Dict[str, Any]], extra_options: Dict[str, Any]
) -> str:
    """Render grammar analysis results as plain text, one corrected
    sentence per line, optionally followed by its annotations"""
    lines: List[str] = []
    pending: List[str] = []
    for result in sentence_results:
        sent_text = result["corrected"]
        if extra_options["annotations"]:
            pending.extend(str(ann) for ann in result["annotations"])
        if pending and not extra_options["print_all"]:
            # Emit the annotations directly below the sentence
            sent_text = sent_text + "\n" + "\n".join(pending)
            pending = []
        lines.append(sent_text)
    return "\n".join(lines)
def format_json(sentence_results: List[Dict[str, Any]]) -> str:
formatted_sentences: List[str] = []
offset = 0
for result in sentence_results:
tokens = result["tokens"]
token_offsets: List[int] = []
for t in tokens:
token_offsets.append(offset)
offset += len(t["o"] or t["x"] or "")
# Add a past-the-end offset to make later calculations more convenient
token_offsets.append(offset)
# Convert the annotations to a standard format before encoding in JSON
formatted_annotations: List[AnnDict] = [
AnnDict(
# Start token index of this annotation
start=ann.start,
# End token index (inclusive)
end=ann.end,
# Character offset of the start of the annotation in the original text
start_char=token_offsets[ann.start],
# Character offset of the end of the annotation in the original text
# (inclusive, i.e. the offset of the last character)
end_char=token_offsets[ann.end + 1] - 1,
code=ann.code,
text=ann.text,
detail=ann.detail or "",
suggest=ann.suggest or "",
)
for ann in result["annotations"]
]
ard = AnnResultDict(
original=result["original"],
corrected=result["corrected"],
tokens=tokens,
annotations=formatted_annotations,
)
formatted_sentences.append(json.dumps(ard))
return "\n".join(formatted_sentences)
def format_csv(sentence_results: List[Dict[str, Any]]) -> str:
accumul: List[str] = []
for result in sentence_results:
for ann in result["annotations"]:
accumul.append(
"{},{},{},{},{},{}".format(
ann.code,
ann.original,
ann.suggest,
ann.start,
ann.end,
ann.suggestlist,
)
)
return "\n".join(accumul)
def format_m2(sentence_results: List[Dict[str, Any]]) -> str:
accumul: List[str] = []
for result in sentence_results:
accumul.append("S {0}".format(" ".join([t["x"] for t in result["tokens"]])))
for ann in result["annotations"]:
accumul.append(
"A {0} {1}|||{2}|||{3}|||REQUIRED|||-NONE-|||0".format(
ann.start, ann.end, ann.code, ann.suggest
)
)
accumul.append("")
return "\n".join(accumul) | /reynir-correct-3.4.7.tar.gz/reynir-correct-3.4.7/src/reynir_correct/wrappers.py | 0.68342 | 0.245074 | wrappers.py | pypi |
from typing import List, Tuple, Dict, Literal, Optional, Union
from os.path import abspath as os_abspath, basename as os_basename
from urllib.parse import urlsplit as urllib_urlsplit, quote as urllib_quote, unquote as urllib_unquote
from requests.api import request as requests_request
from requests.models import Response
from filetype import guess as filetype_guess
from .rfile import read_file
from .rregular import search
def url_join(url: str, params: Dict) -> str:
"""
Join `URL` and `parameters`.
Parameters
----------
url : URL.
params : Parameters of URL.
Returns
-------
Joined URL.
"""
# Join parameters.
params_str = "&".join(
[
"%s=%s" % (key, urllib_quote(val))
for key, val in params.items()
]
)
# Join URL.
if "?" not in url:
url += "?"
elif url[-1] != "?":
url += "&"
url += params_str
return url
def url_split(url: str) -> Tuple[str, Dict[str, str]]:
"""
Split `URL` and `parameters`.
Parameters
----------
url : URL.
Returns
-------
Split URL and parameters.
"""
# Split URL.
split_result = urllib_urlsplit(url)
params_str = split_result.query
url = split_result.scheme + "://" + split_result.netloc + split_result.path
# Split parameters.
params = {
key: urllib_unquote(val)
for key, val in map(
lambda item: item.split("=", 1),
params_str.split("&")
)
}
return url, params
def cookie_join(params: Dict[str, str]) -> str:
"""
Join parameters of `Cookie`.
Parameters
----------
params : Parameters.
Returns
-------
Joined cookie.
"""
# Join.
cookie = "; ".join(
[
"%s=%s" % (key, val)
for key, val in params.items()
]
)
return cookie
def cookie_split(cookie: str) -> Dict[str, str]:
"""
Split parameters of `Cookie`.
Parameters
----------
cookie : Cookie.
Returns
-------
Split parameters
"""
# Split parameters.
params = {
key: val
for key, val in map(
lambda item: item.split("=", 1),
cookie.split("; ")
)
}
return params
def content_type(file: Union[str, bytes]) -> str:
"""
Guess HTTP `content type` of file.
Parameters
----------
file : File path or bytes data.
Returns
-------
HTTP content type.
"""
# Guess.
file_type_obj = filetype_guess(file)
if file_type_obj is not None:
return file_type_obj.MIME
def request(
url: str,
params: Optional[Dict] = None,
data: Optional[Union[Dict, str, bytes]] = None,
json: Optional[Dict] = None,
files: Optional[Dict[str, Union[str, bytes, Tuple[Union[str, bytes], dict]]]] = None,
headers: Dict = {},
timeout: Optional[float] = None,
proxies: Dict[str, str] = {},
method: Optional[Literal["get", "post", "put", "patch", "delete"]] = None,
throw_e: bool = False
) -> Response:
"""
`Send` request.
Parameters
----------
url : Request URL.
params : Request URL add parameters.
data : Request body data.
- `Dict` : Convert to `key=value&...` format bytes.
Auto set `Content-Type` to `application/x-www-form-urlencoded`.
- `str` : File path to read file bytes data.
Auto set `Content-Type` to file media type, and `filename` to file name.
- `bytes` : File bytes data.
Auto set `Content-Type` to file media type.
json : Request body data, convert to `JSON` format.
Auto set `Content-Type` to `application/json`.
files : Request body data, convert to `multi form` format.
Auto set `Content-Type` to `multipart/form-data`.
- `Dict[str, str]` : Parameter name and File path to read file bytes data.
Auto set `Content-Type` to file media type, and `filename` to file name.
- `Dict[str, bytes]` : Parameter name and file bytes data.
- `Dict[str, Tuple[str, dict]` : Parameter name and File path to read file bytes data and other parameters.
Auto set `Content-Type` to file media type, and `filename` to file name.
- `Dict[str, Tuple[bytes, dict]` : Parameter name and file bytes data and other parameters.
headers : Request header data.
timeout : Request maximun waiting time.
- `None` : No limit.
- `Union[int, float]` : Use this value.
proxies : Proxy IP setup.
- `None` : No setup.
- `Dict[str, str]` : Name and use IP of each protocol.
method : Request method.
- `None` : Automatic judge.
* When parameter `data` or `json` or `files` not has value, then request method is `get`.
* When parameter `data` or `json` or `files` has value, then request method is `post`.
- `Literal['get', 'post', 'put', 'patch', 'delete']` : Use this request method.
throw_e : Whether throw `exception`, when response code is not `200`.
Returns
-------
Response object of requests package.
"""
# Handle parameters.
if method is None:
if data is None and json is None and files is None:
method = "get"
else:
method = "post"
if files is None:
if data.__class__ == str:
if "Content-Disposition" not in headers:
file_name = os_basename(data)
headers["Content-Disposition"] = "attachment; filename=%s" % file_name
data = read_file(data)
if data.__class__ == bytes:
if "Content-Type" not in headers:
headers["Content-Type"] = content_type(data)
else:
for key, val in files.items():
if val.__class__ == tuple:
item_data, item_headers = val
else:
item_data, item_headers = val, {}
if item_data.__class__ == str:
if "filename" not in item_headers:
item_headers["filename"] = os_basename(item_data)
item_data = read_file(item_data)
if item_data.__class__ == bytes:
if "Content-Type" not in item_headers:
item_headers["Content-Type"] = content_type(item_data)
files[key] = item_headers.get("filename", key), item_data, item_headers.get("Content-Type"), item_headers
# Request.
response = requests_request(
method,
url,
params=params,
data=data,
json=json,
files=files,
headers=headers,
timeout=timeout,
proxies=proxies,
)
# Set encod type.
if response.encoding == "ISO-8859-1":
response.encoding = "utf-8"
# Throw exception.
assert not (throw_e and response.status_code != 200), "response code is not 200, but %s" % response.status_code
return response
def download(url: str, path: Optional[str] = None) -> str:
"""
`Download` file from URL.
Parameters
----------
url : Download URL.
path : Save path.
- `None` : File name is `download` and auto judge file type.
Returns
-------
File absolute path.
"""
# Download.
response = request(url)
content = response.content
# Judge file type and path.
if path is None:
Content_disposition = response.headers.get("Content-Disposition", "")
if "filename" in Content_disposition:
file_name = search(
"filename=['\"]?([^\s'\"]+)",
Content_disposition
)
else:
file_name = None
if file_name is None:
file_type_obj = filetype_guess(content)
if file_type_obj is not None:
file_name = "download." + file_type_obj.EXTENSION
if file_name is None:
file_name = "download"
path = os_abspath(file_name)
# Save.
with open(path, "wb") as file:
file.write(content)
return path | /reyworm-0.6-py3-none-any.whl/reytool/rrequest.py | 0.859428 | 0.309369 | rrequest.py | pypi |
from typing import List, Tuple, Optional, Union, Literal, overload
from re import search as re_search, sub as re_sub, findall as re_findall
from .rdata import unique
def search(pattern: str, text: str) -> Optional[Union[str, Tuple[Optional[str], ...]]]:
"""
Regular `matching` text.
Parameters
----------
pattern : Regular pattern.
text : Match text.
Returns
-------
Matching result.
- When match to and not use `group`, then return string.
- When match to and use `group`, then return tuple with value string or None.
If tuple length is `1`, extract and return string.
- When no match, then return None.
"""
# Search.
obj_re = re_search(pattern, text)
# Return result.
if obj_re is not None:
result = obj_re.groups()
if result == ():
result = obj_re[0]
elif len(result) == 1:
result = obj_re[1]
return result
@overload
def search_batch(text: str, *patterns: str, first: bool = True) -> Union[
Optional[Union[str, Tuple[Optional[str], ...]]],
List[Optional[Union[str, Tuple[Optional[str], ...]]]]
]: ...
@overload
def search_batch(first: Literal[True]) -> Optional[Union[str, Tuple[Optional[str], ...]]]: ...
@overload
def search_batch(first: Literal[False]) -> List[Optional[Union[str, Tuple[Optional[str], ...]]]]: ...
def search_batch(text: str, *patterns: str, first: bool = True) -> Union[
Optional[Union[str, Tuple[Optional[str], ...]]],
List[Optional[Union[str, Tuple[Optional[str], ...]]]]
]:
"""
`Batch` regular `search` text.
Parameters
----------
text : Match text.
pattern : Regular pattern.
first : Whether return first successful match.
Returns
-------
Matching result.
- When match to and not use group, then return string.
- When match to and use group, then return tuple with value string or None.
- When no match, then return.
"""
# Search.
## Return first result.
if first:
for pattern in patterns:
result = search(pattern, text)
if result is not None:
return result
## Return all result.
else:
result = [search(pattern, text) for pattern in patterns]
return result
def sub_batch(text: str, *patterns: Tuple[str, str]) -> str:
"""
`Batch` regular `replace` text.
Parameters
----------
text : Match text.
patterns : Regular pattern and replace text.
Returns
-------
Replaced result.
"""
# Replace.
for pattern, replace in patterns:
text = re_sub(pattern, replace, text)
return text
def findall_batch(text: str, *patterns: str) -> str:
"""
`Batch` regular `find all` text.
Parameters
----------
text : Match text.
patterns : Regular pattern.
Returns
-------
List of Find result.
"""
# Find all.
texts = [
string
for pattern in patterns
for string in re_findall(pattern, text)
]
# De duplicate.
texts = unique(texts)
return texts | /reyworm-0.6-py3-none-any.whl/reytool/rregular.py | 0.955569 | 0.485661 | rregular.py | pypi |
from typing import Callable, Any, Generator, Optional
from tqdm import tqdm
from concurrent.futures import ThreadPoolExecutor, as_completed
from .rwrap import update_tqdm
def threads(
func: Callable,
*args: Any,
max_workers: Optional[int] = None,
thread_name: Optional[str] = None,
timeout: Optional[int] = None,
to_tqdm: bool = True,
**kwargs: Any
) -> Generator:
"""
Concurrent `multi tasks` using thread pool.
Parameters
----------
func : Task function.
args : Position parameter of input parameter task function.
max_workers: Maximum number of threads.
- `None` : Number of CPU + 4, 32 maximum.
- `int` : Use this value, no maximum limit.
thread_name: Thread name prefix and progress bar description.
- `None` : Thread name prefix is `ThreadPoolExecutor-%d` % index, and no progress bar.
- `str` : Use this value.
timeout : Call generator maximum waiting second, overdatetime throw exception.
- `None` : Unlimited.
- `int` : Use this value.
to_tqdm : Whether print progress bar.
kwargs : Keyword parameter of input parameter task function.
Returns
-------
Generator with multi Future object, object from concurrent package.
When called, it will block until all tasks are completed.
When `for` syntax it, the task that complete first return first.
Examples
--------
Get value.
>>> results = [future.result() for future in Generator]
"""
# Handle parameters.
if thread_name is None:
thread_name = func.__name__
params_lens = {len(param) for param in args}
params_lens -= {1}
min_param_len = min(params_lens)
args = [
list(param) * min_param_len
if len(param) == 1
else param
for param in args
]
kwargs = [
[[key, val]] * min_param_len
if len(val) == 1
else [
[key, param]
for param in val
]
for key, val in kwargs.items()
]
if args:
args = zip(*args)
else:
args = [[]] * min_param_len
if kwargs:
kwargs = zip(*kwargs)
kwargs = [dict(param) for param in kwargs]
else:
kwargs = [{}] * min_param_len
params = zip(args, kwargs)
# Create thread pool.
thread_pool = ThreadPoolExecutor(max_workers, thread_name)
# Add progress bar.
if to_tqdm:
tqdm_desc = "ThreadPool " + thread_name
obj_tqdm = tqdm(desc=tqdm_desc, total=min_param_len)
func = update_tqdm(func, obj_tqdm, _execute=False)
# Start thread pool.
tasks = [thread_pool.submit(func, *args, **kwargs) for args, kwargs in params]
# Return generator.
obj_tasks = as_completed(tasks, timeout)
return obj_tasks | /reyworm-0.6-py3-none-any.whl/reytool/rmultitask.py | 0.891315 | 0.32599 | rmultitask.py | pypi |
from typing import Any, List, Tuple, Iterable, Callable, Type, Literal, Optional, Union
from types import TracebackType
from sys import exc_info
from traceback import format_exc
from warnings import warn as warnings_warn
from varname import argname
def warn(*warn_infos: Any, warn_type: Type[BaseException] = UserWarning, stacklevel: int = 3) -> None:
"""
Throw `warning`.
Parameters
----------
warn_info : Warn informations.
warn_type : Warn type.
stacklevel : Warning code location, number of recursions up the code level.
"""
# Handle parameters.
if warn_infos == ():
warn_infos = "Warning!"
elif len(warn_infos) == 1:
if warn_infos[0].__class__ == str:
warn_infos = warn_infos[0]
else:
warn_infos = str(warn_infos[0])
else:
warn_infos = str(warn_infos)
# Throw warning.
warnings_warn(warn_infos, warn_type, stacklevel)
def exc(report: Optional[str] = None) -> Tuple[str, BaseException, Any, TracebackType]:
"""
Return exception information and print, must used in `except` syntax.
Parameters
----------
report : Print report title.
- `None` : Not print.
- `str` : Print and use this title.
Returns
-------
Exception information text and exception type and exception object and exception position object.
"""
# Get exception information.
exception_report = format_exc()
exception_report = exception_report.strip()
exception_info = exc_info()
exception = exception_report, *exception_info
# Report.
if report is not None:
## Import.
from .rtext import rprint
## Execute.
rprint(exception_report, title=report, frame="half")
return exception
def check_target(value: Any, *targets: Union[Any, Literal["_iterable"]], check_element: bool = False) -> None:
"""
Check the content or type of the value, when check fail, then throw `exception`.
Parameters
---------
value : Check object.
targets : Correct target, can be type.
- `Any` : Check whether it is the target.
- `Literal['_iterable']` : Check whether it can be iterable.
check_element : Whether check element in value.
"""
# Handle parameters.
if check_element:
values = value
else:
values = [value]
# Check.
for element in values:
## Check sub elements.
if "_iterable" in targets and is_iterable(element):
continue
## Right target.
if element.__class__ in targets:
continue
for target in targets:
if element is target:
continue
## Throw exception.
var_name = get_name(value)
if var_name is not None:
var_name = " '%s'" % var_name
else:
var_name = ""
correct_targets_str = ", ".join([repr(target) for target in targets])
if check_element:
raise ValueError(
"parameter%s the elements content or type must in [%s], now: %s" % (
var_name,
correct_targets_str,
repr(value)
)
)
else:
raise ValueError(
"parameter%s the content or type must in [%s], now: %s" % (
var_name,
correct_targets_str,
repr(value)
)
)
def check_least_one(*values: Any) -> None:
"""
Check that at least one of multiple values is not `None`, when check fail, then throw `exception`.
Parameters
----------
values : Check values.
"""
# Check.
for value in values:
if value is not None:
return
# Throw exception.
vars_name = get_name(values)
if vars_name is not None:
vars_name_de_dup = list(set(vars_name))
vars_name_de_dup.sort(key=vars_name.index)
vars_name_str = " " + " and ".join(["'%s'" % var_name for var_name in vars_name_de_dup])
else:
vars_name_str = ""
raise ValueError("at least one of parameters%s is not None" % vars_name_str)
def check_most_one(*values: Any) -> None:
"""
Check that at most one of multiple values is not `None`, when check fail, then throw `exception`.
Parameters
----------
values : Check values.
"""
# Check.
none_count = 0
for value in values:
if value is not None:
none_count += 1
# Throw exception.
if none_count > 1:
vars_name = get_name(values)
if vars_name is not None:
vars_name_de_dup = list(set(vars_name))
vars_name_de_dup.sort(key=vars_name.index)
vars_name_str = " " + " and ".join(["'%s'" % var_name for var_name in vars_name_de_dup])
else:
vars_name_str = ""
raise ValueError("at most one of parameters%s is not None" % vars_name_str)
def is_iterable(obj: Any, exclude_types: Iterable[Type] = [str, bytes]) -> bool:
"""
Judge whether it is `iterable`.
Parameters
----------
obj : Judge object.
exclude_types : Non iterative types.
Returns
-------
Judgment result.
"""
# Exclude types.
if obj.__class__ in exclude_types:
return False
# Judge.
try:
obj_dir = obj.__dir__()
except TypeError:
return False
if "__iter__" in obj_dir:
return True
else:
return False
def is_table(obj: Any, check_fields: bool = True) -> bool:
"""
Judge whether it is `List[Dict]` table format and keys and keys sort of the Dict are the same.
Parameters
----------
obj : Judge object.
check_fields : Do you want to check the keys and keys sort of the Dict are the same.
Returns
-------
Judgment result.
"""
# Judge.
if obj.__class__ != list:
return False
for element in obj:
if element.__class__ != dict:
return False
## Check fields of table.
if check_fields:
keys_strs = [
":".join([str(key) for key in element.keys()])
for element in obj
]
keys_strs_only = set(keys_strs)
if len(keys_strs_only) != 1:
return False
return True
def is_number_str(string: str) -> bool:
"""
Judge whether it is `number` string.
Parameters
----------
string : String.
return_value : Whether return value.
Returns
-------
Judgment result.
"""
# Judge.
try:
int(string)
except ValueError:
return False
return True
def get_first_notnull(
*values: Any,
default: Union[None, Any, Literal["exception"]] = None,
null_values: List = [None]) -> Any:
"""
Get the first value that is not `None`.
Parameters
----------
values : Check values.
default : When all are null, then return this is value, or throw exception.
- `Any` : Return this is value.
- `Literal['exception']` : Throw `exception`.
null_values : Range of null values.
Returns
-------
Return first not null value, when all are `None`, then return default value.
"""
# Get value.
for value in values:
if value not in null_values:
return value
# Throw exception.
if default == "exception":
vars_name = get_name(values)
if vars_name is not None:
vars_name_de_dup = list(set(vars_name))
vars_name_de_dup.sort(key=vars_name.index)
vars_name_str = " " + " and ".join(["'%s'" % var_name for var_name in vars_name_de_dup])
else:
vars_name_str = ""
raise ValueError("at least one of parameters%s is not None" % vars_name_str)
return default
def to_type(obj: Any, to_type: Type, method: Optional[Callable] = None) -> Any:
"""
Convert object `type`.
Parameters
----------
obj : Convert object.
to_type : Target type.
method : Convert method.
- `None` : Use value of parameter `to_type`.
- `Callable` : Use this method.
Returns
-------
Converted object.
"""
# Judge type.
if obj.__class__ == to_type:
return obj
# Convert type.
if method is not None:
return method(obj)
else:
return to_type(obj)
def get_name(obj: Any, frame: int = 2) -> Optional[Union[str, Tuple[str, ...]]]:
"""
Get object `name`.
Parameters
----------
obj : Object.
frame : Number of code to upper level.
Returns
-------
Object name or None.
"""
# Get name using built in method.
try:
name = obj.__name__
except AttributeError:
# Get name using module method.
name = "obj"
try:
for _frame in range(1, frame + 1):
name = argname(name, frame=_frame)
if name.__class__ != str:
if "".join(name) == "":
name = None
except:
name = None
return name | /reyworm-0.6-py3-none-any.whl/reytool/rbase.py | 0.839076 | 0.310472 | rbase.py | pypi |
from typing import Any, Dict, Literal, Optional, Union, overload
from pandas import DataFrame, concat as pd_concat
from time import time as time_time, sleep as time_sleep
from datetime import (
datetime as datetime_datetime,
date as datetime_date,
time as datetime_time,
timedelta as datetime_timedelta
)
from .rbase import check_target, is_number_str
from .rother import randn
from .rtext import rprint
@overload
def now(
format: Literal["datetime", "date", "time", "timestamp", "datetime_str", "date_str", "time_str"] = "datetime_str"
) -> Union[datetime_datetime, datetime_date, datetime_time, int, str]: ...
@overload
def now(format: Literal["datatime"]) -> datetime_datetime: ...
@overload
def now(format: Literal["date"]) -> datetime_date: ...
@overload
def now(format: Literal["time"]) -> datetime_time: ...
@overload
def now(format: Literal["datetime_str", "date_str", "time_str"]) -> str: ...
@overload
def now(format: Literal["timestamp"]) -> int: ...
def now(
format: Literal["datetime", "date", "time", "datetime_str", "date_str", "time_str", "timestamp"] = "datetime_str"
) -> Union[datetime_datetime, datetime_date, datetime_time, str, int]:
"""
Get `current` time string or intger or object.
Parameters
----------
format : Format type.
- `Literal[`datetime`]` : Return datetime object of datetime package.
- `Literal[`date`]` : Return date object of datetime package.
- `Literal[`time`]` : Return time object of datetime package.
- `Literal[`datetime_str`]` : Return string in format `%Y-%m-%d %H:%M:%S`.
- `Literal[`date_str`]` : Return string in format `%Y-%m-%d`.
- `Literal[`time_str`]` : Return string in foramt `%H:%M:%S`.
- `Literal[`timestamp`]` : Return time stamp in milliseconds.
Returns
-------
Time string or object of datetime package.
"""
# Return time object by parameter format.
if format == "datetime":
return datetime_datetime.now()
elif format == "date":
return datetime_datetime.now().date()
elif format == "time":
return datetime_datetime.now().time()
elif format == "datetime_str":
return datetime_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
elif format == "date_str":
return datetime_datetime.now().strftime("%Y-%m-%d")
elif format == "time_str":
return datetime_datetime.now().strftime("%H:%M:%S")
elif format == "timestamp":
return int(time_time() * 1000)
@overload
def time2str(
object_: Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any],
format: Optional[str] = None,
throw_e: bool = False
) -> Union[str, Any]: ...
@overload
def time2str(object_: Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int]) -> str: ...
@overload
def time2str(object_: Any) -> Any: ...
def time2str(
object_: Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any],
format: Optional[str] = None,
throw_e: bool = False
) -> Union[str, Any]:
"""
Format time object of package `datetime` to string.
Parameters
----------
object_ : Object of `datetime` package or int.
format : Format string.
- `None` : Automatic by type.
* Parameter `object_` is datetime_datetime : Is `%Y-%m-%d %H:%M:%S`.
* Parameter `object_` is datetime_date : Is `%Y-%m-%d`.
* Parameter `object_` is datetime_time : Is `%H:%M:%S`.
* Parameter `object_` is datetime_timedelta : Is f`{days} %H:%M:%S`.
* Parameter `object_` is time stamp : Is `%Y-%m-%d %H:%M:%S`.
- `str` : Format by this value.
throw_e : Whether throw exception, when parameter `object_` value error, otherwise return original value.
Returns
-------
String after foramt or original value.
"""
# Check parameters.
if throw_e:
check_target(object_, datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int)
# Convert to time string.
## From datetime object.
if object_.__class__ == datetime_datetime:
if format is None:
string = str(object_)[:19]
else:
string = object_.strftime(format)
## From date object.
elif object_.__class__ == datetime_date:
if format is None:
string = str(object_)[:10]
else:
string = object_.strftime(format)
## From time object.
elif object_.__class__ == datetime_time:
if format is None:
string = str(object_)[:8]
else:
string = object_.strftime(format)
## From timedelta object.
elif object_.__class__ == datetime_timedelta:
if format is None:
string = str(object_)
if "day" in string:
day, char, string = string.split(" ")
else:
day = "0"
if string[1] == ":":
string = "0" + string
string = "%s %s" % (day, string[:8])
else:
seconds = object_.microseconds / 1000_000
datetime_obj = datetime_datetime.fromtimestamp(seconds)
string = datetime_obj.strftime(format)
## From int object.
elif object_.__class__ == int:
int_len = len(str(object_))
if int_len > 10:
divisor = 10 ** (int_len - 10)
seconds = object_ / divisor
else:
seconds = object_
datetime_obj = datetime_datetime.fromtimestamp(seconds)
if format is None:
format = "%Y-%m-%d %H:%M:%S"
string = datetime_obj.strftime(format)
## From other object.
else:
return object_
return string
@overload
def str2time(
string: Union[str, Any],
type_: Optional[Literal["datetime", "date", "time", "timedelta", "timestamp"]] = None,
format: Optional[str] = None,
throw_e: bool = False
) -> Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any]: ...
@overload
def str2time(type_: Literal["datetime"]) -> Union[datetime_datetime, Any]: ...
@overload
def str2time(type_: Literal["date"]) -> Union[datetime_date, Any]: ...
@overload
def str2time(type_: Literal["time"]) -> Union[datetime_time, Any]: ...
@overload
def str2time(type_: Literal["timedelta"]) -> Union[datetime_timedelta, Any]: ...
@overload
def str2time(type_: Literal["timestamp"]) -> Union[int, Any]: ...
@overload
def str2time(type_: None) -> Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, Any]: ...
def str2time(
string: Union[str, Any],
type_: Optional[Literal["datetime", "date", "time", "timedelta", "timestamp"]] = None,
format: Optional[str] = None,
throw_e: bool = False
) -> Union[datetime_datetime, datetime_date, datetime_time, datetime_timedelta, int, Any]:
"""
Format string to time object of package `datetime`.
Parameters
----------
string : Time string.
type_ : Format type.
- `None` : Automatic judgment.
- `Literal[`datetime`]` : Return datetime object of package datetime.
- `Literal[`date`]` : Return date object of package datetime.
- `Literal[`time`]` : Return time object of package datetime.
- `Literal[`timedelta`]` : Return timedelta object of package datetime.
- `Literal[`timestamp`]` : Return time stamp in milliseconds.
format : Format string.
- `None` : Default format method.
* Parameter `type_` is `datetime` : Is `%Y-%m-%d %H:%M:%S`.
* Parameter `type_` is `date` : Is `%Y-%m-%d`.
* Parameter `type_` is `time` : Is `%H:%M:%S`.
* Parameter `type_` is `timedelta` : Is `days %H:%M:%S`.
* Parameter `type_` is `timestamp` : Is `%Y-%m-%d %H:%M:%S`.
* Parameter `type_` is None : automatic judgment.
- `str` : Format by this value.
throw_e : Whether throw exception, when parameter `time_obj` value error, otherwise return original value.
Returns
-------
Time object of datetime package or time stamp or original value.
"""
# Check parameters.
if string.__class__ != str:
return string
# Get time format by automatic judgment.
if type_ is None:
str_len = len(string)
if "年" == string[4:5]:
if str_len > 11:
format = "%Y年%m月%d日 %H时%M分%S秒"
type_ = "datetime"
else:
format = "%Y年%m月%d日"
type_ = "date"
elif "时" in string[1:3]:
format = "%H时%M分%S秒"
type_ = "time"
elif " " in string and "-" not in string:
format = "%H:%M:%S"
type_ = "timedelta"
elif str_len == 19:
format = "%Y-%m-%d %H:%M:%S"
type_ = "datetime"
elif str_len == 14:
format = "%Y%m%d%H%M%S"
type_ = "datetime"
elif str_len == 10:
format = "%Y-%m-%d"
type_ = "date"
elif str_len == 8:
if string[2] == ":":
format = "%H:%M:%S"
type_ = "time"
else:
format = "%Y%m%d"
type_ = "date"
elif str_len == 6:
format = "%H%M%S"
type_ = "time"
elif str_len == 4:
format = "%Y"
type_ = "date"
else:
return string
# Get time format by parameter `type_`.
else:
if format is None:
format_dir = {
"datetime": "%Y-%m-%d %H:%M:%S",
"date": "%Y-%m-%d",
"time": "%H:%M:%S",
"timestamp": "%Y-%m-%d %H:%M:%S",
"timedelta": "%H:%M:%S"
}
format = format_dir[type_]
# Additional processing timedelta type.
if type_ == "timedelta":
if " " in string:
strings = string.split(" ")
day_str, string = strings[0], strings[-1]
else:
day = "0"
try:
day = int(day_str)
except ValueError:
if throw_e:
raise ValueError("failed to format string as time object")
return string
# Convert to time type.
try:
time_obj = datetime_datetime.strptime(string, format)
except ValueError:
if throw_e:
raise ValueError("failed to format string as time object")
return string
if type_ == "date":
time_obj = time_obj.date()
elif type_ == "time":
time_obj = time_obj.time()
elif type_ == "timestamp":
time_obj = int(time_obj.timestamp() * 1000)
elif type_ == "timedelta":
second = time_obj.second
second += day * 86400
time_obj = datetime_timedelta(seconds=second)
return time_obj
def sleep(*thresholds: Union[int, float], precision: Optional[int] = None) -> Union[int, float]:
"""
`Sleep` random seconds.
Parameters
----------
thresholds : Low and high thresholds of random range, range contains thresholds.
- When `length is 0`, then low and high thresholds is `0` and `10`.
- When `length is 1`, then sleep this value.
- When `length is 2`, then low and high thresholds is `thresholds[0]` and `thresholds[1]`.
precision : Precision of random range, that is maximum decimal digits of sleep seconds.
- `None` : Set to Maximum decimal digits of element of parameter `thresholds`.
- `int` : Set to this value.
Returns
-------
Random seconds.
- When parameters `precision` is `0`, then return int.
- When parameters `precision` is `greater than 0`, then return float.
"""
# Handle parameters.
thresholds_len = len(thresholds)
if thresholds_len == 0:
second = randn(0, 10, precision=precision)
elif thresholds_len == 1:
second = thresholds[0]
elif thresholds_len == 2:
second = randn(thresholds[0], thresholds[1], precision=precision)
else:
raise ValueError("number of parameter 'thresholds' must is 0 or 1 or 2")
# Sleep.
time_sleep(second)
return second
class RTimeMark():
    """
    Rey`s date time `mark` type.

    Each call of `mark` appends one information Dict to attribute `record`,
    in chronological order.
    """

    def __init__(self) -> None:
        """
        Mark now time.
        """
        # Initialize the record list up front, so 'mark' and 'report' can
        # always rely on it existing.
        self.record = []
        # Marking.
        self.mark()

    def mark(self) -> Dict[
        Literal["index", "timestamp", "datetime", "datetime_str", "interval_timestamp", "interval_timedelta", "interval_timedelta_str"],
        Optional[Union[str, float, datetime_datetime, datetime_timedelta]]
    ]:
        """
        `Mark` now time and return mark time information.

        Returns
        -------
        Mark time information.
        """
        # Compatible with instances where 'record' was never initialized
        # ('hasattr' replaces the previous '__dir__' membership test).
        if not hasattr(self, "record"):
            self.record = []
        # Get parameters.
        record_len = len(self.record)
        mark_info = {
            "index": record_len,
            "timestamp": now("timestamp"),
            "datetime": now("datetime"),
            "datetime_str": now(),
        }
        # Marking.
        ## First: no previous mark, so no intervals.
        if record_len == 0:
            mark_info["interval_timestamp"] = None
            mark_info["interval_timedelta"] = None
            mark_info["interval_timedelta_str"] = None
        ## Non first: intervals are measured against the previous mark.
        else:
            last_datetime = self.record[-1]["datetime"]
            last_timestamp = self.record[-1]["timestamp"]
            mark_info["interval_timestamp"] = mark_info["timestamp"] - last_timestamp
            mark_info["interval_timedelta"] = mark_info["datetime"] - last_datetime
            mark_info["interval_timedelta_str"] = time2str(mark_info["interval_timedelta"])
        self.record.append(mark_info)
        return mark_info

    def report(self) -> DataFrame:
        """
        `Print` and return mark time information.

        Returns
        -------
        DataFrame object of pandas package with mark time information.
        """
        # Collect one row per mark.
        data = [
            {
                "timestamp": row["timestamp"],
                "datetime": row["datetime_str"],
                "interval": row["interval_timedelta_str"]
            }
            for row in self.record
        ]
        # Generate report.
        report_df = DataFrame(data, dtype=str)
        # Total interval from the first to the last mark, appended as a
        # summary row labeled 'sum'.
        interval_timedelta = self.record[-1]["datetime"] - self.record[0]["datetime"]
        interval = time2str(interval_timedelta)
        sum_df = DataFrame({"interval": interval}, index=["sum"])
        report_df = pd_concat([report_df, sum_df])
        report_df.fillna("-", inplace=True)
        # Report.
        title = "Time Mark"
        rprint(report_df, title=title)
        return report_df
def sqlalchemy_add_result_more_fetch():
    """
    `Modify` package `sqlalchemy`, add more fetch methods to CursorResult object.
    """
    from sqlalchemy.engine.cursor import CursorResult
    from .rdata import to_table, to_dict, to_df, to_json, to_sql, to_html, to_csv, to_excel

    # Converter functions keyed by the fetch method name to attach.
    fetch_methods = {
        "fetch_table": to_table,    # Result as table in List[Dict] format.
        "fetch_dict": to_dict,      # Result as dictionary.
        "fetch_df": to_df,          # Result as DataFrame object.
        "fetch_json": to_json,      # Result as JSON string.
        "fetch_sql": to_sql,        # Result as SQL string.
        "fetch_html": to_html,      # Result as HTML string.
        "fetch_csv": to_csv,        # Result saved as csv format file.
        "fetch_excel": to_excel,    # Result saved as excel file.
    }
    # Attach every fetch method to the CursorResult class.
    for method_name, method in fetch_methods.items():
        setattr(CursorResult, method_name, method)

    # Annotation helper type carrying the added methods.
    class RResult(CursorResult):
        """
        `Update` based on `CursorResult` object, for annotation return value.
        """

        # Inherit document.
        __doc__ = CursorResult.__doc__

        # Add more fetch methods.
        fetch_table = to_table
        fetch_dict = to_dict
        fetch_df = to_df
        fetch_json = to_json
        fetch_sql = to_sql
        fetch_html = to_html
        fetch_csv = to_csv
        fetch_excel = to_excel

    return RResult
def sqlalchemy_support_row_index_by_field():
    """
    `Modify` package `sqlalchemy`, support Row object of package sqlalchemy index by field name.
    """
    from typing import Any, Union, Sequence, overload
    from sqlalchemy.engine.row import Row

    # Overload stubs documenting the accepted index forms.
    @overload
    def __getitem__(self, index: Union[str, int, slice]) -> Union[Any, Sequence[Any]]: ...

    @overload
    def __getitem__(self, index: Union[str, int]) -> Any: ...

    @overload
    def __getitem__(self, index: slice) -> Sequence[Any]: ...

    def __getitem__(self, index: Union[str, int, slice]) -> Union[Any, Sequence[Any]]:
        """
        `Index` row value.

        Parameters
        ----------
        index : Field name or subscript or slice.

        Returns
        -------
        Index result.
        """
        # Field-name access goes through the mapping view; positional and
        # slice access go through the raw data tuple.
        if type(index) is str:
            return self._mapping[index]
        return self._data[index]

    # Install the replacement index method on Row.
    Row.__getitem__ = __getitem__
def pprint_modify_format_width_judgment() -> None:
    """
    Based on module `pprint.pformat`, `modify` the chinese width judgment.
    """
    from pprint import PrettyPrinter, _recursion
    from urwid import old_str_util

    def get_width(text: str) -> int:
        """
        `Get` text `display width`.

        Parameters
        ----------
        text : Text.

        Returns
        -------
        Text display width, summed from urwid's per-character width table.
        """
        return sum(old_str_util.get_width(ord(char)) for char in text)

    def _format(_self, obj, stream, indent, allowance, context, level):
        """
        Replacement for PrettyPrinter._format that measures display width
        (via get_width) instead of character count when deciding whether
        to break a representation across lines.
        """
        obj_id = id(obj)
        # Guard against self-referential structures.
        if obj_id in context:
            stream.write(_recursion(obj))
            _self._recursive = True
            _self._readable = False
            return
        rep = _self._repr(obj, context, level)
        # Break up the representation when its display width overflows.
        if get_width(rep) > _self._width - indent - allowance:
            printer = _self._dispatch.get(type(obj).__repr__, None)
            if printer is not None:
                context[obj_id] = 1
                printer(_self, obj, stream, indent, allowance, context, level + 1)
                del context[obj_id]
                return
            if isinstance(obj, dict):
                context[obj_id] = 1
                _self._pprint_dict(obj, stream, indent, allowance,
                                   context, level + 1)
                del context[obj_id]
                return
        stream.write(rep)

    # Modify the chinese width judgment.
    PrettyPrinter._format = _format
from typing import Any, Tuple, Callable, Optional, Union, Literal, overload
from tqdm import tqdm as tqdm_tqdm
from threading import Thread
from functools import wraps as functools_wraps
from .rbase import exc
from .rtext import rprint
from .rdatetime import RTimeMark, now
from . import roption
def wrap_frame(decorator: Callable) -> Callable:
    """
    Decorative `frame`.

    Parameters
    ----------
    decorator : Decorator function.

    Returns
    -------
    Decorator after decoration.

    Examples
    --------
    Decoration function method one.
    >>> @wrap_func
    >>> def func(): ...
    >>> result = func(param_a, param_b, param_c=1, param_d=2)

    Decoration function method two.
    >>> def func(): ...
    >>> result = wrap_func(func, param_a, param_b, param_c=1, param_d=2)

    Decoration function method three.
    >>> def func(): ...
    >>> result = wrap_func(func, _execute=True)

    Decoration function method four.
    >>> def func(): ...
    >>> func = wrap_func(func)
    >>> result = func(param_a, param_b, param_c=1, param_d=2)

    Decoration function method five.
    >>> def func(): ...
    >>> func = wrap_func(func, param_a, param_c=1, _execute=False)
    >>> result = func(param_b, param_d=2)
    """
    # Decorate Decorator.
    @functools_wraps(decorator)
    def wrap(func: Callable, *args: Any, _execute: Optional[bool] = None, **kwargs: Any) -> Union[Callable, Any]:
        """
        Decorative `shell`.

        Parameters
        ----------
        _execute : Whether execute function, otherwise decorate function.
            - `None` : When parameter `args` or `kwargs` have values, then True, otherwise False.
            - `bool` : Use this value.

        Returns
        -------
        Function after decoration or return of function.
        """
        # Default: execute immediately when any call arguments were supplied.
        if _execute is None:
            _execute = bool(args or kwargs)
        # Direct execution.
        if _execute:
            return decorator(func, *args, **kwargs)
        # Decorate function: arguments bound now are merged with arguments
        # supplied later at call time.
        @functools_wraps(func)
        def wrap_sub(*_args: Any, **_kwargs: Any) -> Any:
            """
            Decorative sub shell.
            """
            return decorator(func, *args, *_args, **kwargs, **_kwargs)
        return wrap_sub
    return wrap
def wraps(*wrap_funcs: Callable) -> Callable:
    """
    `Batch` decorate.

    Parameters
    ----------
    wrap_funcs : Decorator function.

    Returns
    -------
    Function after decoration.

    Examples
    --------
    Decoration function.
    >>> @wraps(print_funtime, state_thread)
    >>> def func(): ...
    >>> result = func()

    Same up and down
    >>> def func(): ...
    >>> func = print_funtime(func)
    >>> func = state_thread(func)
    >>> result = func()
    """
    # Bug fix: the previous loop built a chain of closures with a
    # late-bound 'wrap_func', so only the LAST decorator was ever applied.
    # Apply every decorator sequentially instead, first one innermost,
    # matching the documented equivalence above.
    def wrap(func: Callable) -> Callable:
        """
        Decorative shell.
        """
        decorated = func
        for wrap_func in wrap_funcs:
            # A 'wrap_frame' decorator called with only the function (no
            # extra arguments) returns the decorated function unexecuted.
            decorated = wrap_func(decorated)
        return decorated
    return wrap
@overload
def runtime(func: Callable, *args: Any, _return_report: bool = False, **kwargs: Any) -> Union[Any, Tuple[Any, str]]: ...

@overload
def runtime(_return_report: Literal[False]) -> Any: ...

@overload
def runtime(_return_report: Literal[True]) -> Union[Any, Tuple[Any, str]]: ...

@wrap_frame
def runtime(func: Callable, *args: Any, _return_report: bool = False, **kwargs: Any) -> Union[Any, Tuple[Any, str]]:
    """
    Print or return `runtime` report of the function.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _return_report : Whether return report, otherwise print report.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result or runtime report.
    """
    # Mark the clock around the function call.
    time_mark = RTimeMark()
    result = func(*args, **kwargs)
    time_mark.mark()
    # Build the report text from the two marks.
    spend_seconds = time_mark.record[-1]["interval_timestamp"] / 1000
    report = "Start: %s -> Spend: %ss -> End: %s" % (
        time_mark.record[0]["datetime_str"],
        spend_seconds,
        time_mark.record[1]["datetime_str"]
    )
    # Return report.
    if _return_report:
        return result, report
    # Print report under the function's name.
    rprint(report, title=func.__name__)
    return result
@overload
def start_thread(func: Callable, *args: Any, _daemon: bool = True, **kwargs: Any) -> Thread: ...

@wrap_frame
def start_thread(func: Callable, *args: Any, _daemon: bool = True, **kwargs: Any) -> Thread:
    """
    Function start in `thread`.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _daemon : Whether it is a daemon thread.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Thread object.
    """
    # Name the thread after the function plus the current timestamp.
    thread_name = "%s_%d" % (func.__name__, now("timestamp"))
    # Build the thread with the daemon flag set via the constructor
    # (equivalent to assigning 'daemon' before start).
    worker = Thread(target=func, name=thread_name, args=args, kwargs=kwargs, daemon=_daemon)
    worker.start()
    return worker
@overload
def try_exc(func: Callable, *args: Any, **kwargs: Any) -> Optional[Any]: ...

@wrap_frame
def try_exc(func: Callable, *args: Any, **kwargs: Any) -> Optional[Any]:
    """
    Execute function with `try` syntax and print exception information.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result or no return.
    """
    # Execute function.
    try:
        result = func(*args, **kwargs)
    # Print exception information.
    # Explicit 'BaseException' replaces the previous bare 'except'; the
    # behavior is identical (KeyboardInterrupt/SystemExit also reported).
    except BaseException:
        exc(func.__name__)
    # Return function result.
    else:
        return result
@overload
def update_tqdm(
    func: Callable,
    tqdm: tqdm_tqdm,
    *args: Any,
    _desc: Optional[str] = None,
    _step: Union[int, float] = 1,
    **kwargs: Any
) -> Any: ...

@wrap_frame
def update_tqdm(
    func: Callable,
    tqdm: tqdm_tqdm,
    *args: Any,
    _desc: Optional[str] = None,
    _step: Union[int, float] = 1,
    **kwargs: Any
) -> Any:
    """
    Update progress bar `tqdm` object of `tqdm` package.

    Parameters
    ----------
    func : Function to be decorated.
    tqdm : Progress bar tqdm object.
    args : Position parameter of input parameter decorated function.
    _desc : Progress bar description.
        - `None` : no description.
        - `str` : Add description.
    _step : Progress bar step size.
        - `When` greater than 0, then forward.
        - `When` less than 0, then backward.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result.
    """
    # Optionally label the progress bar before running.
    if _desc is not None:
        tqdm.set_description(_desc)
    # Run the wrapped function, then advance the bar by the given step.
    result = func(*args, **kwargs)
    tqdm.update(_step)
    return result
@overload
def retry(
    func: Callable,
    *args: Any,
    _report: Optional[str] = None,
    _exception: Union[BaseException, Tuple[BaseException, ...]] = BaseException,
    _try_total: int = 1,
    _try_count: int = 0,
    **kwargs: Any
) -> Any: ...

@wrap_frame
def retry(
    func: Callable,
    *args: Any,
    _report: Optional[str] = None,
    _exception: Union[BaseException, Tuple[BaseException, ...]] = BaseException,
    _try_total: int = 1,
    _try_count: int = 0,
    **kwargs: Any
) -> Any:
    """
    `Try` again.

    Parameters
    ----------
    func : Function to be decorated.
    args : Position parameter of input parameter decorated function.
    _report : Print report title.
        - `None` : Not print.
        - `str` : Print and use this title.
    _exception : Catch exception types.
    _try_total : Retry total.
    _try_count : Retry count.
    kwargs : Keyword parameter of input parameter decorated function.

    Returns
    -------
    Function execute result.
    """
    # While retries remain: attempt, catch the configured exceptions,
    # optionally report, then try again (iterative form of the original
    # recursion, same observable behavior).
    while _try_count < _try_total:
        try:
            return func(*args, **kwargs)
        except _exception:
            ## Report.
            if _report is not None:
                exception_msg, _, _, _ = exc()
                rprint(
                    exception_msg,
                    "Retrying...",
                    title=_report,
                    frame="half"
                )
            _try_count += 1
    # Retries exhausted: final attempt, any exception propagates.
    return func(*args, **kwargs)
from typing import Any, List, Tuple, Dict, Iterable, Optional, Literal, Union, ClassVar, NoReturn, overload
from re import findall
from sqlalchemy import create_engine as sqlalchemy_create_engine, text
from sqlalchemy.engine.base import Engine, Connection
from sqlalchemy.engine.cursor import CursorResult
from sqlalchemy.engine.url import URL
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.exc import OperationalError
from pandas import DataFrame
from .rbase import get_first_notnull
from .rdata import to_table
from .rmonkey import sqlalchemy_add_result_more_fetch, sqlalchemy_support_row_index_by_field
from . import roption
from .rother import str2n
from .rregular import search
from .rtext import rprint
from .rwrap import runtime, retry
# Monkey patch: add more fetch methods to CursorResult object; RResult is the annotation alias.
RResult = sqlalchemy_add_result_more_fetch()
# Monkey patch: support Row object of package sqlalchemy index by field name.
sqlalchemy_support_row_index_by_field()
class REngine(object):
    """
    Rey's database `Engine` type, based on the package `sqlalchemy`.
    """

    # Values converted to None (SQL "NULL") when filling SQL parameters.
    null_values: ClassVar[List] = ["", " ", b"", [], (), {}, set()]
    @overload
    def __init__(
        self,
        host: Optional[str] = None,
        port: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        database: Optional[str] = None,
        drivername: Optional[str] = None,
        pool_size: int = 5,
        max_overflow: int = 10,
        pool_timeout: float = 30.0,
        pool_recycle: Optional[int] = None,
        url: Optional[Union[str, URL]] = None,
        engine: Optional[Union[Engine, Connection]] = None,
        **query: str
    ) -> None: ...

    # NOTE(review): these NoReturn overloads document that connection
    # parameters are mandatory when neither 'url' nor 'engine' is given.
    @overload
    def __init__(self, username: None, url: None, engine: None) -> NoReturn: ...

    @overload
    def __init__(self, password: None, url: None, engine: None) -> NoReturn: ...

    @overload
    def __init__(self, host: None, url: None, engine: None) -> NoReturn: ...

    @overload
    def __init__(self, port: None, url: None, engine: None) -> NoReturn: ...

    def __init__(
        self,
        host: Optional[str] = None,
        port: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        database: Optional[str] = None,
        drivername: Optional[str] = None,
        pool_size: int = 5,
        max_overflow: int = 10,
        pool_timeout: float = 30.0,
        pool_recycle: Optional[int] = None,
        url: Optional[Union[str, URL]] = None,
        engine: Optional[Union[Engine, Connection]] = None,
        **query: str
    ) -> None:
        """
        Create database `Engine` object and set parameters.

        Parameters
        ----------
        host : Server host.
        port : Server port.
        username : Server user name.
        password : Server password.
        database : Database name in the server.
        drivername : Database backend and driver name.
            - `None` : Auto select and try.
            - `str` : Use this value.
        pool_size : Number of connections `keep open`.
        max_overflow : Number of connections `allowed overflow`.
        pool_timeout : Number of seconds `wait create` connection.
        pool_recycle : Number of seconds `recycle` connection.
            - `None` : Use database variable `wait_timeout` value.
            - `Literal[-1]` : No recycle.
            - `int` : Use this value.
        url: Get parameters from server `URL`, but preferred input parameters.
            Parameters include `username`, `password`, `host`, `port`, `database`, `drivername`, `query`.
        engine : Use existing `Engine` or `Connection` object, and get parameters from it.
            Parameters include `username`, `password`, `host`, `port`, `database`, `drivername`, `query`,
            `pool_size`, `max_overflow`, `pool_timeout`, `pool_recycle`.
        query : Server parameters.
        """
        # From existing Engine or Connection object.
        if engine is not None:
            ## Extract Engine object from Connection object.
            if engine.__class__ == Connection:
                engine = engine.engine
            ## Extract parameters.
            params = self.extract_from_engine(engine)
            ## Set.
            self.drivername = params["drivername"]
            self.username = params["username"]
            self.password = params["password"]
            self.host = params["host"]
            self.port = params["port"]
            self.database = params["database"]
            self.query = params["query"]
            self.pool_size = params["pool_size"]
            self.max_overflow = params["max_overflow"]
            self.pool_timeout = params["pool_timeout"]
            self.pool_recycle = params["pool_recycle"]
            self.engine = engine
        # From parameters create.
        else:
            ## Extract parameters from URL, or start from all-None defaults.
            if url is not None:
                params = self.extract_from_url(url)
            else:
                params = dict.fromkeys(
                    (
                        "drivername",
                        "username",
                        "password",
                        "host",
                        "port",
                        "database",
                        "query"
                    )
                )
            ## Set parameters by priority: explicit arguments win over
            ## URL-extracted values; 'exception' defaults mean required.
            self.drivername = get_first_notnull(drivername, params["drivername"])
            self.username = get_first_notnull(username, params["username"], default="exception")
            self.password = get_first_notnull(password, params["password"], default="exception")
            self.host = get_first_notnull(host, params["host"], default="exception")
            self.port = get_first_notnull(port, params["port"], default="exception")
            self.database = get_first_notnull(database, params["database"])
            self.query = get_first_notnull(query, params["query"], default={"charset": "utf8"}, null_values=[None, {}])
            self.pool_size = pool_size
            self.max_overflow = max_overflow
            self.pool_timeout = pool_timeout
            ## Create Engine object.
            ### Set number of seconds recycle connection.
            if pool_recycle is None:
                # Build once with no recycling, read the server's
                # 'wait_timeout' variable, then apply it as the recycle time.
                self.pool_recycle = -1
                self.engine = self.create_engine()
                variables = self.get_variables()
                self.pool_recycle = int(variables["wait_timeout"])
                self.engine.pool._recycle = int(variables["wait_timeout"])
            else:
                self.pool_recycle = pool_recycle
                self.engine = self.create_engine()
def extract_from_url(self, url: Union[str, URL]) -> Dict[
Literal["drivername", "username", "password", "host", "port", "database", "query"],
Optional[Union[str, Dict[str, str]]]
]:
"""
Extract parameters from `URL` of string.
Parameters
----------
url : URL of string.
Returns
-------
Extracted parameters.
"""
# Extract.
## When str object.
if url.__class__ == str:
pattern = "^([\w\+]+)://(\w+):(\w+)@(\d+\.\d+\.\d+\.\d+):(\d+)[/]?([\w/]+)?[\?]?([\w&=]+)?$"
result = search(pattern, url)
if result is None:
raise ValueError("the value of parameter 'url' is incorrect")
(
drivername,
username,
password,
host,
port,
database,
query_str
) = result
if query_str is not None:
pattern = "(\w+)=(\w+)"
query_findall = findall(pattern, query_str)
query = {key: val for key, val in query_findall}
else:
query = {}
## When URL object.
elif url.__class__ == URL:
drivername = url.drivername
username = url.username
password = url.password
host = url.host
port = url.port
database = url.database
query = dict(url.query)
# Generate parameters.
params = {
"drivername": drivername,
"username": username,
"password": password,
"host": host,
"port": port,
"database": database,
"query": query
}
return params
def extract_from_engine(self, engine: Union[Engine, Connection]) -> Dict[
Literal[
"drivername", "username", "password", "host", "port", "database", "query",
"pool_size", "max_overflow", "pool_timeout", "pool_recycle"
],
Optional[Union[str, Dict[str, str], float]]
]:
"""
Extract parameters from `Engine` or `Connection` object.
Parameters
----------
engine : Engine or Connection object.
Returns
-------
Extracted parameters.
"""
## Extract Engine object from Connection boject.
if engine.__class__ == Connection:
engine = engine.engine
## Extract.
drivername = engine.url.drivername
username = engine.url.username
password = engine.url.password
host = engine.url.host
port = engine.url.port
database = engine.url.database
query = dict(engine.url.query)
pool_size = engine.pool._pool.maxsize
max_overflow = engine.pool._max_overflow
pool_timeout = engine.pool._timeout
pool_recycle = engine.pool._recycle
# Generate parameters.
params = {
"drivername": drivername,
"username": username,
"password": password,
"host": host,
"port": port,
"database": database,
"query": query,
"pool_size": pool_size,
"max_overflow": max_overflow,
"pool_timeout": pool_timeout,
"pool_recycle": pool_recycle
}
return params
def url(self) -> str:
"""
Generate server `URL`.
Returns
-------
Server URL.
"""
# Generate URL.
_url = f"{self.drivername}://{self.username}:{self.password}@{self.host}:{self.port}"
# Add database path.
if not self.database is None:
_url = f"{_url}/{self.database}"
# Add Server parameters.
if self.query != {}:
query = "&".join(
[
"%s=%s" % (key, val)
for key, val in self.query.items()
]
)
_url = f"{_url}?{query}"
return _url
def create_engine(self) -> Engine:
"""
Create database `Engine` object.
Returns
-------
Engine object.
"""
# Handle parameters.
if self.drivername is None:
drivernames = ("mysql+mysqldb", "mysql+pymysql")
else:
drivernames = (self.drivername,)
# Create Engine object.
for drivername in drivernames:
self.drivername = drivername
url = self.url()
## Try create.
try:
engine = sqlalchemy_create_engine(
url,
pool_size=self.pool_size,
max_overflow=self.max_overflow,
pool_timeout=self.pool_timeout,
pool_recycle=self.pool_recycle
)
except ModuleNotFoundError:
pass
else:
return engine
# Throw exception.
drivernames_str = " and ".join(
[
dirvername.split("+", 1)[-1]
for dirvername in drivernames
]
)
raise ModuleNotFoundError("module %s not fund" % drivernames_str)
def count_connection(self) -> Tuple[int, int]:
"""
Count number of `keep open` and `allowed overflow` connection.
Returns
-------
Number of keep open and allowed overflow connection.
"""
# Count.
_overflow = self.engine.pool._overflow
if _overflow < 0:
keep_n = self.pool_size + _overflow
overflow_n = 0
else:
keep_n = self.pool_size
overflow_n = _overflow
return keep_n, overflow_n
def fill_data(
self,
data: Union[Dict, List[Dict]],
sql: Union[str, TextClause],
) -> List[Dict]:
"""
`Fill` missing data according to contents of `TextClause` object of package `sqlalchemy`, and filter out empty Dict.
Parameters
----------
data : Data set for filling.
sql : SQL in method sqlalchemy.text format, or TextClause object.
Returns
-------
Filled data.
"""
# Handle parameters.
if data.__class__ == dict:
data = [data]
if sql.__class__ == TextClause:
sql = sql.text
# Filter out empty Dict.
data = [
param
for param in data
if param != {}
]
# Extract fill field names.
pattern = "(?<!\\\):(\w+)"
sql_keys = findall(pattern, sql)
# Fill data.
for param in data:
for key in sql_keys:
val = param.get(key)
if val in self.null_values:
val = None
param[key] = val
return data
def get_syntax(self, sql: Union[str, TextClause]) -> str:
"""
Extract `syntax` type form SQL.
Parameters
----------
sql : SQL text or TextClause object.
Returns
-------
SQL syntax type.
"""
# Handle parameters.
if sql.__class__ == TextClause:
sql = sql.text
# Extract.
split_sql = sql.split(maxsplit=1)
syntax_type = split_sql[0]
syntax_type = syntax_type.upper()
return syntax_type
    def executor(
        self,
        connection: Connection,
        sql: TextClause,
        data: List[Dict],
        report: bool
    ) -> RResult:
        """
        `SQL` executor.

        Parameters
        ----------
        connection : Connection object.
        sql : TextClause object.
        data : Data set for filling.
        report : Whether report SQL execute information.

        Returns
        -------
        CursorResult object of alsqlchemy package.
        """
        # NOTE(review): this method branches on the concrete class; the
        # RConnection branch references attributes ('begin_count',
        # 'rollback', 'begin') presumably defined on RConnection elsewhere
        # in this file — not visible here, confirm against that class.
        # When REngine.
        if self.__class__ == REngine:
            ## Create Transaction object (committed/rolled back by the context manager).
            with connection.begin():
                ## Execute.
                ### Report: time the execution and print SQL, data and row count.
                if report:
                    result, report_runtime = runtime(connection.execute, sql, data, _return_report=True)
                    report_info = "%s\nRow Count: %d" % (report_runtime, result.rowcount)
                    if data == []:
                        rprint(report_info, sql, title="SQL")
                    else:
                        rprint(report_info, sql, data, title="SQL")
                ### Not report.
                else:
                    result = connection.execute(sql, data)
        # When RConnection.
        elif self.__class__ == RConnection:
            ## Create Transaction object only when no write statement is pending.
            if self.begin_count == 0:
                self.rollback()
                self.begin = connection.begin()
            ## Execute.
            ### Report: time the execution and print SQL, data and row count.
            if report:
                result, report_runtime = runtime(connection.execute, sql, data, _return_report=True)
                report_info = "%s\nRow Count: %d" % (report_runtime, result.rowcount)
                if data == []:
                    rprint(report_info, sql, title="SQL")
                else:
                    rprint(report_info, sql, data, title="SQL")
            ### Not report.
            else:
                result = connection.execute(sql, data)
            ## Count write statements so commit/rollback can be deferred.
            syntax = self.get_syntax(sql)
            if syntax in ("UPDATE", "INSERT", "DELETE"):
                self.begin_count += 1
        return result
    def execute(
        self,
        sql: Union[str, TextClause],
        data: Optional[Union[List[Dict], Dict, CursorResult, DataFrame]] = None,
        report: bool = None,
        **kwdata: Any
    ) -> RResult:
        """
        Execute `SQL`.

        Parameters
        ----------
        sql : SQL in method `sqlalchemy.text` format, or `TextClause` object.
        data : Data set for filling.
        report : Whether report SQL execute information.
            - `None` : Use `report_execute_info` of module `roption`.
            - `int` : Use this value.
        kwdata : Keyword data for filling.

        Returns
        -------
        CursorResult object of alsqlchemy package.
        """
        # Get parameters by priority.
        report = get_first_notnull(report, roption.report_execute_info)
        # Handle parameters.
        if sql.__class__ == str:
            sql = text(sql)
        # Normalize 'data' to List[Dict] and merge keyword data into each row.
        if data is None:
            data = [kwdata]
        else:
            if data.__class__ == dict:
                data = [data]
            elif isinstance(data, CursorResult):
                data = to_table(data)
            elif data.__class__ == DataFrame:
                data = to_table(data)
            else:
                # Shallow copy: protects the caller's list from mutation,
                # though the row dicts themselves are still updated in place.
                data = data.copy()
            for param in data:
                param.update(kwdata)
        # Fill missing data.
        data = self.fill_data(data, sql)
        # Execute.
        ## When REngine.
        if self.__class__ == REngine:
            ### Create Connection object.
            with self.engine.connect() as connection:
                ### Can retry: operational errors are retried once via 'retry'.
                result = retry(
                    self.executor,
                    connection,
                    sql,
                    data,
                    report,
                    _report="Database execute operational error",
                    _exception=OperationalError
                )
        ## When RConnection.
        elif self.__class__ == RConnection:
            ### Can retry when not counted (no pending transaction writes).
            if self.begin_count == 0:
                result = retry(
                    self.executor,
                    self.connection,
                    sql,
                    data,
                    report,
                    _report="Database execute operational error",
                    _exception=OperationalError
                )
            ### Cannot retry when counted: a retry would repeat prior writes.
            else:
                result = self.executor(self.connection, sql, data, report)
        return result
    def execute_select(
        self,
        table: str,
        database: Optional[str] = None,
        fields: Optional[Union[str, Iterable]] = None,
        where: Optional[str] = None,
        group: Optional[str] = None,
        having: Optional[str] = None,
        order: Optional[str] = None,
        limit: Optional[Union[int, str, List, Tuple]] = None,
        report: bool = None,
        **kwdata: Any
    ) -> RResult:
        """
        Execute `select` SQL.

        Parameters
        ----------
        table : Table name.
        database : Database name.
        fields : Select clause content.
            - `None` : Is `SELECT *`.
            - `str` : Join as `SELECT str`.
            - `Iterable[str]` : Join as `SELECT \`str\`, ...`.
        where : Clause `WHERE` content, join as `WHERE str`.
        group : Clause `GROUP BY` content, join as `GROUP BY str`.
        having : Clause `HAVING` content, join as `HAVING str`.
        order : Clause `ORDER BY` content, join as `ORDER BY str`.
        limit : Clause `LIMIT` content.
            - `Union[int, str]` : Join as `LIMIT int/str`.
            - `Union[List, Tuple]` with length of 1 or 2 `int/str` : Join as `LIMIT int/str [, int/str]`.
        report : Whether report SQL execute information.
            - `None` : Use `report_execute_info` of module `roption`.
            - `int` : Use this value.
        kwdata : Keyword data for filling.

        Returns
        -------
        CursorResult object of alsqlchemy package.
        """
        # Handle parameters: fall back to the engine's default database.
        if database is None:
            _database = self.database
        else:
            _database = database
        # Generate SQL, one clause per list element, joined with newlines.
        sqls = []
        ## Part 'SELECT' syntax.
        if fields is None:
            fields = "*"
        elif fields.__class__ != str:
            # Quote each field name with backticks.
            fields = ",".join(["`%s`" % field for field in fields])
        sql_select = f"SELECT {fields}"
        sqls.append(sql_select)
        ## Part 'FROM' syntax.
        sql_from = f"FROM `{_database}`.`{table}`"
        sqls.append(sql_from)
        ## Part 'WHERE' syntax.
        if where is not None:
            sql_where = "WHERE %s" % where
            sqls.append(sql_where)
        ## Part 'GROUP BY' syntax.
        if group is not None:
            sql_group = "GROUP BY %s" % group
            sqls.append(sql_group)
        ## Part 'HAVING' syntax.
        if having is not None:
            sql_having = "HAVING %s" % having
            sqls.append(sql_having)
        ## Part 'ORDER BY' syntax.
        if order is not None:
            sql_order = "ORDER BY %s" % order
            sqls.append(sql_order)
        ## Part 'LIMIT' syntax.
        if limit is not None:
            if limit.__class__ in (str, int):
                sql_limit = f"LIMIT {limit}"
            else:
                if len(limit) in (1, 2):
                    limit_content = ",".join([str(val) for val in limit])
                    sql_limit = "LIMIT %s" % limit_content
                else:
                    raise ValueError("The length of the parameter 'limit' value must be 1 or 2")
            sqls.append(sql_limit)
        sql = "\n".join(sqls)
        # Execute SQL.
        result = self.execute(sql, report=report, **kwdata)
        return result
    def execute_update(
        self,
        data: Union[List[Dict], Dict, CursorResult, DataFrame],
        table: str,
        database: Optional[str] = None,
        where_fields: Optional[Union[str, Iterable[str]]] = None,
        report: bool = None
    ) -> Optional[RResult]:
        """
        `Update` the data of table in the datebase.

        Parameters
        ----------
        data : Updated data.
        table : Table name.
        database : Database name.
        where_fields : Clause `WHERE` clause content.
            - `None` : The first key value pair of each item is judged.
            - `str` : This key value pair of each item is judged.
            - `Iterable[str]` : Multiple judged, `and` relationship.
        report : Whether report SQL execute information.
            - `None` : Use `report_execute_info` of module `roption`.
            - `int` : Use this value.

        Returns
        -------
        None or CursorResult object.
            - `None` : When the data is empty.
            - `CursorResult object` : When the data is not empty.
        """
        # Handle parameters: normalize 'data' to List[Dict].
        if data.__class__ == dict:
            data = [data]
        elif isinstance(data, CursorResult):
            data = to_table(data)
        elif data.__class__ == DataFrame:
            data = to_table(data)
        if database is None:
            _database = self.database
        else:
            _database = database
        # If data is empty, not execute.
        if data in ([], [{}]):
            return
        # Generate SQL: one UPDATE statement per row; parameters are
        # flattened to '<row index>_<field>' keys so the statement batch can
        # reference values from every row.
        data_flatten = {}
        sqls = []
        if where_fields is None:
            no_where = True
        else:
            no_where = False
            if where_fields.__class__ == str:
                where_fields = [where_fields]
        for index, row in enumerate(data):
            for key, val in row.items():
                index_key = "%d_%s" % (index, key)
                data_flatten[index_key] = val
            if no_where:
                # Default: the first key of each row is the WHERE condition.
                where_fields = [list(row.keys())[0]]
            set_content = ",".join(
                [
                    "`%s` = :%d_%s" % (key, index, key)
                    for key in row
                    if key not in where_fields
                ]
            )
            where_content = "\n AND ".join(
                [
                    f"`{field}` = :{index}_{field}"
                    for field in where_fields
                ]
            )
            sql = (
                f"UPDATE `{_database}`.`{table}`\n"
                f"SET {set_content}\n"
                f"WHERE {where_content}"
            )
            sqls.append(sql)
        sqls = ";\n".join(sqls)
        # Execute SQL.
        result = self.execute(sqls, data_flatten, report)
        return result
    def execute_insert(
        self,
        data: Union[List[Dict], Dict, CursorResult, DataFrame],
        table: str,
        database: Optional[str] = None,
        duplicate_method: Optional[Literal["ignore", "update"]] = None,
        report: Optional[bool] = None
    ) -> Optional[RResult]:
        """
        `Insert` the data of table in the database.

        Parameters
        ----------
        data : Inserted data.
        table : Table name.
        database : Database name.
            - `None` : Use the instance's default database.
        duplicate_method : Handle method when constraint error.
            - `None` : Not handled.
            - `ignore` : Use `INSERT IGNORE INTO` clause.
            - `update` : Use `ON DUPLICATE KEY UPDATE` clause.
        report : Whether report SQL execute information.
            - `None` : Use `report_execute_info` of module `roption`.
            - `int` : Use this value.

        Returns
        -------
        None or CursorResult object.
            - `None` : When the data is empty.
            - `CursorResult` object : When the data is not empty.
        """
        # Handle parameters.
        # Normalize single row / CursorResult / DataFrame input to List[Dict].
        if data.__class__ == dict:
            data = [data]
        elif isinstance(data, CursorResult):
            data = to_table(data)
        elif data.__class__ == DataFrame:
            data = to_table(data)
        if database is None:
            _database = self.database
        else:
            _database = database
        # If data is empty, not execute.
        if data in ([], [{}]):
            return
        # Generate SQL.
        # Column list is the union of keys over all rows.
        # NOTE(review): rows missing one of these keys rely on the executor's
        # parameter binding behavior -- confirm against `self.execute`.
        fields = list({key for row in data for key in row})
        fields_str = ",".join(["`%s`" % field for field in fields])
        fields_str_position = ",".join([":" + field for field in fields])
        if duplicate_method == "ignore":
            sql = (
                f"INSERT IGNORE INTO `{_database}`.`{table}`({fields_str})\n"
                f"VALUES({fields_str_position})"
            )
        elif duplicate_method == "update":
            update_content = ",".join(["`%s` = VALUES(`%s`)" % (field, field) for field in fields])
            sql = (
                f"INSERT INTO `{_database}`.`{table}`({fields_str})\n"
                f"VALUES({fields_str_position})\n"
                "ON DUPLICATE KEY UPDATE\n"
                f"{update_content}"
            )
        else:
            sql = (
                f"INSERT INTO `{_database}`.`{table}`({fields_str})\n"
                f"VALUES({fields_str_position})"
            )
        # Execute SQL.
        result = self.execute(sql, data, report)
        return result
    @overload
    def execute_exist(
        self,
        table: str,
        database: Optional[str] = None,
        where: Optional[str] = None,
        count: bool = False,
        report: Optional[bool] = None,
        **kwdata: Any
    ) -> Union[bool, int]: ...
    # NOTE(review): the two overloads below omit the required `table`
    # parameter, so they do not match any legal call -- confirm intent.
    @overload
    def execute_exist(
        self,
        count: Literal[False]
    ) -> bool: ...
    @overload
    def execute_exist(
        self,
        count: Literal[True]
    ) -> int: ...
    def execute_exist(
        self,
        table: str,
        database: Optional[str] = None,
        where: Optional[str] = None,
        count: bool = False,
        report: Optional[bool] = None,
        **kwdata: Any
    ) -> Union[bool, int]:
        """
        Judge whether matching records exist, or `count` them.

        Parameters
        ----------
        table : Table name.
        database : Database name.
        where : Match condition, `WHERE` clause content, join as `WHERE str`.
            - `None` : Match all.
            - `str` : Match condition.
        count : Whether return match count, otherwise return whether it exist.
        report : Whether report SQL execute information.
            - `None` : Use `report_execute_info` of module `roption`.
            - `int` : Use this value.
        kwdata : Keyword data for filling.

        Returns
        -------
        Match count when `count` is True, otherwise whether any record matched.
        """
        # Get parameters.
        # Existence only needs one matching row; counting needs them all.
        if count:
            limit = None
        else:
            limit = 1
        # Execute.
        result = self.execute_select(table, database, "1", where=where, limit=limit, report=report, **kwdata)
        # Returns.
        rowcount = result.rowcount
        if count:
            return rowcount
        else:
            return rowcount != 0
def get_variables(self, global_: bool = False) -> Dict[str, str]:
"""
Get `variables` of database.
Parameters
----------
global_ : Whether get global variable, otherwise get local variable.
"""
# Generate SQL.
if global_:
sql = "SHOW GLOBAL VARIABLES"
else:
sql = "SHOW VARIABLES"
# Execute SQL.
result = self.execute(sql)
# Convert dictionary.
variables = result.fetch_dict()
return variables
def get_status(self, global_: bool = False) -> Dict[str, str]:
"""
Get `status` of database.
Parameters
----------
global_ : Whether get global variable, otherwise get local variable.
"""
# Generate SQL.
if global_:
sql = "SHOW GLOBAL STATUS"
else:
sql = "SHOW STATUS"
# Execute SQL.
result = self.execute(sql)
# Convert dictionary.
status = result.fetch_dict()
return status
    def update_variables(self, params: Dict[str, Union[str, float]], global_: bool = False) -> None:
        """
        Update `variables` of database.

        Parameters
        ----------
        params : Update parameter key value pairs.
        global_ : Whether update global variable, otherwise update local variable.
        """
        # Generate SQL.
        if global_:
            sql_global = " GLOBAL"
        else:
            sql_global = ""
        # Numeric values are emitted bare; everything else is single-quoted.
        # NOTE(review): values are not escaped -- do not pass untrusted input.
        sqls = [
            "SET%s %s = %s" % (
                sql_global,
                key,
                (
                    val
                    if val.__class__ in (int, float)
                    else "'%s'" % val
                )
            )
            for key, val in params.items()
        ]
        # One SET per variable, joined into a single multi-statement batch.
        sqls = ";\n".join(sqls)
        # Execute SQL.
        self.execute(sqls)
    def connect(self):
        """
        Create database `Connection` object.

        Returns
        -------
        RConnection object wrapping a new connection from the engine pool.
        """
        # Pair the raw sqlalchemy connection with this engine's settings.
        rconnection = RConnection(
            self.engine.connect(),
            self
        )
        return rconnection
class RConnection(REngine):
    """
    Rey's database `Connection` type, based on the package `sqlalchemy`.

    Wraps a raw sqlalchemy `Connection` and tracks an optional open
    transaction (`self.begin`) with its pending execution count
    (`self.begin_count`).
    """

    def __init__(
        self,
        connection: Connection,
        rengine: REngine
    ) -> None:
        """
        Create database `Connection` object and set parameters.

        Parameters
        ----------
        connection : Connection object.
        rengine : REngine object.
        """
        self.connection = connection
        self.rengine = rengine
        # Current transaction object and its pending execution count.
        self.begin = None
        self.begin_count = 0
        # Mirror the engine's connection parameters.
        self.drivername = rengine.drivername
        self.username = rengine.username
        self.password = rengine.password
        self.host = rengine.host
        self.port = rengine.port
        self.database = rengine.database
        self.query = rengine.query
        self.pool_recycle = rengine.pool_recycle

    def commit(self) -> None:
        """
        `Commit` cumulative executions.
        """
        # Commit only when a transaction is actually open.
        if self.begin is not None:
            self.begin.commit()
            self.begin = None
            self.begin_count = 0

    def rollback(self) -> None:
        """
        `Rollback` cumulative executions.
        """
        # Roll back only when a transaction is actually open.
        if self.begin is not None:
            self.begin.rollback()
            self.begin = None
            self.begin_count = 0

    def close(self) -> None:
        """
        `Close` database connection.
        """
        self.connection.close()

    def __del__(self) -> None:
        """
        `Close` database connection on garbage collection.
        """
        # Never raise from a destructor: the connection may already be
        # closed, or the interpreter may be shutting down.
        try:
            self.close()
        except Exception:
            pass
from typing import Any, List, Dict, Iterable, Literal, Optional, Union, Type, NoReturn, overload
from os.path import abspath as os_abspath
from pandas import DataFrame, ExcelWriter, isnull
from sqlalchemy.engine.cursor import CursorResult
from .rbase import is_iterable, check_least_one, check_most_one, to_type
from .rdatetime import time2str
def to_table(
    data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]],
    fields: Optional[Iterable] = None
) -> List[Dict]:
    """
    Fetch data to table in `List[Dict]` format, keys and keys sort of the dictionary are the same.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    Table in `List[Dict]` format.
    """
    # Convert.
    ## From CursorResult object.
    if isinstance(data, CursorResult):
        if fields is None:
            fields = data.keys()
        data_table = [
            dict(zip(fields, row))
            for row in data
        ]
    ## From DataFrame object.
    elif data.__class__ == DataFrame:
        data_df = to_df(data, fields)
        fields = data_df.columns
        # pandas NA markers are normalized to plain None.
        data_table = [
            dict(zip(
                fields,
                [
                    None if isnull(val) else val
                    for val in row
                ]
            ))
            for row in data_df.values
        ]
    ## From other object.
    else:
        # Normalize through a DataFrame first, then recurse once.
        data_df = to_df(data, fields)
        data_table = to_table(data_df)
    return data_table
def to_dict(
    data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]],
    key_field: Union[int, str] = 0,
    val_field: Union[int, str] = 1
) -> Dict:
    """
    Fetch result as `dictionary` mapping one field to another.

    Parameters
    ----------
    data : Data.
    key_field : Key field of dictionary.
        - `int` : Subscript index.
        - `str` : Name index.
    val_field : Value field of dictionary.
        - `int` : Subscript index.
        - `str` : Name index.

    Returns
    -------
    Dictionary of key field to value field. Later rows overwrite earlier
    ones on duplicate keys. Empty input yields an empty dictionary.
    """
    # Normalize to a list of row dictionaries.
    data = to_table(data)
    # Empty input has no first row to resolve integer indexes against.
    if not data:
        return {}
    # Resolve positional field indexes to field names from the first row.
    fields = list(data[0].keys())
    if key_field.__class__ == int:
        key_field = fields[key_field]
    if val_field.__class__ == int:
        val_field = fields[val_field]
    # Convert.
    data_dict = {
        row[key_field]: row[val_field]
        for row in data
    }
    return data_dict
def to_df(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> DataFrame:
    """
    Fetch data to table of `DataFrame` object.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    DataFrame object with dtypes converted via `convert_dtypes`.
    """
    # A cursor result carries its own column names when none are given.
    if isinstance(data, CursorResult):
        columns = data.keys() if fields is None else fields
        return DataFrame(data, columns=columns).convert_dtypes()
    # An existing frame is only retyped, and optionally re-labelled.
    if data.__class__ == DataFrame:
        frame = data.convert_dtypes()
        if fields is not None:
            frame.columns = fields
        return frame
    # Any other iterable goes straight to the DataFrame constructor.
    return DataFrame(data, columns=fields).convert_dtypes()
def to_json(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to `JSON` string.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    JSON string.
    """
    # Route through DataFrame for uniform handling of every input type.
    frame = to_df(data, fields)
    # One JSON object per record, keeping non-ASCII characters as-is.
    return frame.to_json(orient="records", force_ascii=False)
def to_sql(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to `SQL` string of `SELECT ... UNION ALL` rows.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    SQL string.
    """
    # Get fields and row values of table.
    if isinstance(data, CursorResult):
        if fields is None:
            fields = data.keys()
        rows = data
    else:
        # Normalize to a table first. Iterating the row dictionaries
        # directly would yield their KEYS, not their values, so extract
        # the value views explicitly.
        data = to_table(data, fields)
        fields = data[0].keys()
        rows = [row.values() for row in data]
    # Generate SQL.
    # Each value is rendered as a quoted string (datetimes formatted) or NULL.
    sql_rows_values = [
        [
            repr(time2str(val, "%Y-%m-%d %H:%M:%S"))
            if val is not None
            else "NULL"
            for val in row
        ]
        for row in rows
    ]
    sql_rows = [
        "SELECT " + ",".join(row_values)
        for row_values in sql_rows_values
    ]
    # The first SELECT carries the column aliases for the whole union.
    sql_row_first = "SELECT " + ",".join(
        [
            "%s AS `%s`" % (val, key)
            for key, val in list(zip(fields, sql_rows_values[0]))
        ]
    )
    sql_rows[0] = sql_row_first
    data_sql = " UNION ALL ".join(sql_rows)
    return data_sql
def to_html(data: Union[CursorResult, DataFrame, List[Dict], Iterable[Iterable]], fields: Optional[Iterable] = None) -> str:
    """
    Fetch data to `HTML` string.

    Parameters
    ----------
    data : Data.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    HTML string.
    """
    # Route through DataFrame for uniform handling of every input type.
    frame = to_df(data, fields)
    # Centered table without the index column.
    return frame.to_html(col_space=50, index=False, justify="center")
def to_csv(
    data: Union[CursorResult, DataFrame, Iterable[Dict], Iterable],
    path: str = "data.csv",
    fields: Optional[Iterable] = None
) -> str:
    """
    Fetch data to save `csv` format file.

    Parameters
    ----------
    data : Data.
    path : File save path.
    fields : Table fields.
        - `None` : Infer.
        - `Iterable` : Use values in Iterable.

    Returns
    -------
    File absolute path.
    """
    # Handle parameters.
    data_df = to_df(data, fields)
    path = os_abspath(path)
    # Save file.
    # NOTE(review): mode="a" appends, so repeated calls write the header
    # row again each time and keep the index column -- confirm intended.
    data_df.to_csv(path, mode="a")
    return path
def to_excel(
    data: Union[CursorResult, DataFrame, Iterable[Dict], Iterable],
    path: str = "data.xlsx",
    group_field: Optional[str] = None,
    sheets_set: Dict[Union[str, int], Dict[Literal["name", "index", "filter"], Union[str, int, List[str]]]] = {}
) -> str:
    """
    Fetch data to save `excel` format file and return sheet name and sheet data.

    Parameters
    ----------
    data : Data.
    path : File save path.
    group_field : Group field; one sheet is written per distinct value.
    sheets_set : Set sheet new name and sort sheet and filter sheet fields,
        key is old name or index, value is set parameters.
        - Parameter `name` : Set sheet new name.
        - Parameter `index` : Sort sheet.
        - Parameter `filter` : Filter sheet fields.

    Returns
    -------
    File absolute path.
    """
    # Handle parameters.
    # NOTE(review): the mutable default for `sheets_set` is only read here,
    # never mutated, so it is harmless -- but a None default would be safer.
    if data.__class__ != DataFrame:
        data = to_df(data)
    path = os_abspath(path)
    # Generate sheets.
    if group_field is None:
        # Single sheet holding the whole frame.
        data_group = (("Sheet1", data),)
    else:
        data_group = data.groupby(group_field)
    sheets_table_before = []
    sheets_table_after = []
    for index, sheet_table in enumerate(data_group):
        sheet_name, sheet_df = sheet_table
        if group_field is not None:
            # The grouping column is constant within a sheet; drop it.
            del sheet_df[group_field]
        # Settings are looked up by sheet name first, then by position.
        if sheet_name in sheets_set:
            sheet_set = sheets_set[sheet_name]
        elif index in sheets_set:
            sheet_set = sheets_set[index]
        else:
            sheets_table_after.append((sheet_name, sheet_df))
            continue
        if "name" in sheet_set:
            sheet_name = sheet_set["name"]
        if "filter" in sheet_set:
            sheet_df = sheet_df[sheet_set["filter"]]
        if "index" in sheet_set:
            sheets_table_before.append((sheet_set["index"], (sheet_name, sheet_df)))
        else:
            sheets_table_after.append((sheet_name, sheet_df))
    # Explicitly indexed sheets come first, in index order.
    sort_func = lambda item: item[0]
    sheets_table_before.sort(key=sort_func)
    sheets_table = [sheet_table for sheet_index, sheet_table in sheets_table_before] + sheets_table_after
    # Save file.
    excel = ExcelWriter(path)
    for sheet_name, sheet_df in sheets_table:
        sheet_df.to_excel(excel, sheet_name, index=False)
    excel.close()
    return path
def count(
    data: Any,
    count_value: Optional[Dict] = None,
    _surface: bool = True
) -> Dict[Literal["size", "total", "types"], Union[int, Dict[Type, int]]]:
    """
    `Count` data element.

    Parameters
    ----------
    data : Data.
    count_value : Cumulative count from an outer call.
        - `None` : Start a fresh count.
        - `Dict` : Accumulate into this dictionary.
    _surface : Whether is surface recursion.

    Returns
    -------
    Count data: leaf element count (`size`), node count (`total`), and a
    per-type node count (`types`) sorted by frequency.
    """
    # A literal dict default would be shared between calls, silently
    # accumulating counts across invocations; build a fresh one per call.
    if count_value is None:
        count_value = {"size": 0, "total": 0, "types": {}}
    # Count element.
    count_value["total"] += 1
    count_value["types"][data.__class__] = count_value["types"].get(data.__class__, 0) + 1
    # Recursion.
    if data.__class__ == dict:
        for element in data.values():
            count(element, count_value, False)
    elif is_iterable(data):
        for element in data:
            count(element, count_value, False)
    else:
        # Leaves contribute to the flat element count.
        count_value["size"] = count_value["size"] + 1
    # End recursion and return.
    if _surface:
        ## Sort types by descending count.
        sorted_func = lambda key: count_value["types"][key]
        sorted_key = sorted(count_value["types"], key=sorted_func, reverse=True)
        count_value["types"] = {key: count_value["types"][key] for key in sorted_key}
    return count_value
def flatten(data: Any, flattern_data: Optional[List] = None) -> List:
    """
    `Flatten` data.

    Parameters
    ----------
    data : Data.
    flattern_data : Recursion cumulative data.
        - `None` : Start a fresh list.
        - `List` : Append into this list.

    Returns
    -------
    Data after flatten.
    """
    # A literal list default would be shared between calls, so every
    # top-level call would keep appending to the same list; create one
    # per call instead.
    if flattern_data is None:
        flattern_data = []
    # Flatten.
    ## Recursion dict object.
    if data.__class__ == dict:
        for element in data.values():
            flatten(element, flattern_data)
    ## Recursion iterator.
    elif is_iterable(data):
        for element in data:
            flatten(element, flattern_data)
    ## Other.
    else:
        flattern_data.append(data)
    return flattern_data
def split(data: Iterable, share: Optional[int] = None, bin_size: Optional[int] = None) -> List[List]:
    """
    `Split` data into multiple bins.

    Parameters
    ----------
    data : Data.
    share : Number of split shares; items are spread as evenly as possible.
    bin_size : Size of each bin; the last bin may be smaller.

    Returns
    -------
    List of bins.

    Raises
    ------
    ValueError : When neither or both of `share` and `bin_size` are given.
    """
    # Check parameters: exactly one of `share` and `bin_size` must be given.
    # (The original routed through check_least_one/check_most_one helpers;
    # the original also overwrote its own accumulator with each bin slice,
    # returning garbage -- fixed here with distinct names.)
    if share is None and bin_size is None:
        raise ValueError("one of parameter 'share' and 'bin_size' must be given")
    if share is not None and bin_size is not None:
        raise ValueError("parameter 'share' and 'bin_size' cannot both be given")
    # Handle parameters.
    items = list(data)
    total = len(items)
    bins: List[List] = []
    ## By number of shares.
    if share is not None:
        average = total / share
        start = 0
        for n in range(share):
            # Integer boundaries of the n-th even slice.
            size = int(average * (n + 1)) - int(average * n)
            bins.append(items[start:start + size])
            start += size
    ## By size of bin.
    else:
        for start in range(0, total, bin_size):
            bins.append(items[start:start + bin_size])
    return bins
def unique(data: Iterable) -> List:
    """
    `De-duplicate` data, keeping first-seen order.

    Parameters
    ----------
    data : Data; elements must be hashable.

    Returns
    -------
    List after de-duplication, in original order of first occurrence.
    """
    # dict preserves insertion order (Python 3.7+), giving a stable O(n)
    # de-dup instead of the original quadratic sort-by-first-index.
    return list(dict.fromkeys(data))
def ins(obj: Any, *arrays: Iterable) -> bool:
    """
    `Judge` whether the object is in any of the given arrays.

    Parameters
    ----------
    obj : Judge object.
    arrays : Array.

    Returns
    -------
    Judge result.
    """
    # Short-circuits on the first array containing the object.
    return any(obj in array for array in arrays)
def mutual_in(*arrays: Iterable) -> bool:
    """
    Whether the same element exists in `multiple` array.

    Parameters
    ----------
    arrays : Array.

    Returns
    -------
    Judge result.
    """
    pools = list(arrays)
    # Compare each array only against the arrays after it.
    for index, pool in enumerate(pools):
        for other in pools[index + 1:]:
            for element in pool:
                if element in other:
                    return True
    return False
from typing import Any, List, Tuple, Literal, Optional, Union
from os import (
walk as os_walk,
listdir as os_listdir
)
from os.path import (
abspath as os_abspath,
join as os_join,
isfile as os_isfile,
isdir as os_isdir
)
from random import randint as random_randint
from .rbase import is_number_str
def digits(number: Union[int, float]) -> Tuple[int, int]:
    """
    Count the integer digits and decimal digits of a number.

    Parameters
    ----------
    number : Number to judge.

    Returns
    -------
    Integer digits and decimal digits.
    """
    # A leading sign is not a digit: the original counted "-" of negative
    # numbers as an integer digit. Strip it before measuring.
    number_str = str(number).lstrip("+-")
    # Get digits.
    if "." in number_str:
        integer_str, decimal_str = number_str.split(".")
        integer_digits = len(integer_str)
        decimal_digits = len(decimal_str)
    else:
        integer_digits = len(number_str)
        decimal_digits = 0
    return integer_digits, decimal_digits
def randn(*thresholds: Union[int, float], precision: Optional[int] = None) -> Union[int, float]:
    """
    `Get` random number.

    Parameters
    ----------
    thresholds : Low and high thresholds of random range, range contains thresholds.
        - When `length is 0`, then low and high thresholds is `0` and `10`.
        - When `length is 1`, then low and high thresholds is `0` and `thresholds[0]`.
        - When `length is 2`, then low and high thresholds is `thresholds[0]` and `thresholds[1]`.
    precision : Precision of random range, that is maximum decimal digits of return value.
        - `None` : Set to maximum decimal digits of element of parameter `thresholds`.
        - `int` : Set to this value.

    Returns
    -------
    Random number.
        - When parameter `precision` is 0, then return int.
        - When parameter `precision` is greater than 0, then return float.
    """
    # Resolve low and high thresholds.
    thresholds_len = len(thresholds)
    if thresholds_len == 0:
        threshold_low, threshold_high = 0, 10
    elif thresholds_len == 1:
        threshold_low, threshold_high = 0, thresholds[0]
    elif thresholds_len == 2:
        threshold_low, threshold_high = thresholds
    else:
        raise ValueError("number of parameter 'thresholds' must be 0 or 1 or 2")
    # Infer precision from the maximum decimal digits of the thresholds.
    if precision is None:
        def _decimal_digits(value: Union[int, float]) -> int:
            text = str(value)
            return len(text.rsplit(".", 1)[1]) if "." in text else 0
        precision = max(_decimal_digits(threshold_low), _decimal_digits(threshold_high))
    # Get random number.
    # round() before randint: scaling float thresholds yields float values,
    # which random.randint rejects (TypeError on modern Python).
    magnifier = 10 ** precision
    low = round(threshold_low * magnifier)
    high = round(threshold_high * magnifier)
    number = random_randint(low, high) / magnifier
    if precision == 0:
        number = int(number)
    return number
def get_paths(path: Optional[str] = None, target: Literal["all", "file", "folder"] = "all", recursion: bool = True) -> List:
    """
    `Get` the path of files and folders in the `path`.

    Parameters
    ----------
    path : Directory to scan.
        - `None` : Use the working directory.
    target : Target data.
        - `Literal['all']` : Return file and folder path.
        - `Literal['file']` : Return file path.
        - `Literal['folder']` : Return folder path.
    recursion : Whether to recurse into subdirectories.

    Returns
    -------
    List of absolute paths. An unknown `target` yields an empty list.
    """
    # Handle parameters.
    if path is None:
        path = ""
    path = os_abspath(path)
    # Get paths.
    paths = []
    ## Recursive.
    if recursion:
        obj_walk = os_walk(path)
        if target == "all":
            targets_path = [
                os_join(dir_path, name)
                for dir_path, folder_names, file_names in obj_walk
                for name in file_names + folder_names
            ]
            paths.extend(targets_path)
        elif target == "file":
            targets_path = [
                os_join(dir_path, name)
                for dir_path, folder_names, file_names in obj_walk
                for name in file_names
            ]
            paths.extend(targets_path)
        # The original tested `target in ("all", "folder")` here, but "all"
        # is already handled above, so only "folder" can reach this branch.
        elif target == "folder":
            targets_path = [
                os_join(dir_path, name)
                for dir_path, folder_names, file_names in obj_walk
                for name in folder_names
            ]
            paths.extend(targets_path)
    ## Non recursive.
    else:
        names = os_listdir(path)
        if target == "all":
            for name in names:
                paths.append(os_join(path, name))
        elif target == "file":
            for name in names:
                target_path = os_join(path, name)
                if os_isfile(target_path):
                    paths.append(target_path)
        elif target == "folder":
            for name in names:
                target_path = os_join(path, name)
                if os_isdir(target_path):
                    paths.append(target_path)
    return paths
def str2n(string: str) -> Any:
    """
    Try to `convert` a string to a number.

    Parameters
    ----------
    string : String.

    Returns
    -------
    Converted number, or the source string when it is not numeric.
    """
    # Non-numeric strings are returned unchanged.
    if not is_number_str(string):
        return string
    # A decimal point distinguishes float from int literals.
    return float(string) if "." in string else int(string)
def n2ch(number: int) -> str:
    """
    `Convert` number to chinese number.

    Parameters
    ----------
    number : Number to convert.

    Returns
    -------
    Chinese number.
    """
    # Import.
    # Local import avoids a circular dependency with the regular module.
    from .rregular import sub_batch
    # Set parameters.
    # Digit characters 0-9.
    map_digit = {
        "0": "零",
        "1": "一",
        "2": "二",
        "3": "三",
        "4": "四",
        "5": "五",
        "6": "六",
        "7": "七",
        "8": "八",
        "9": "九",
    }
    # Positional unit for each decimal place (from the ones place upward).
    map_digits = {
        0: "",
        1: "十",
        2: "百",
        3: "千",
        4: "万",
        5: "十",
        6: "百",
        7: "千",
        8: "亿",
        9: "十",
        10: "百",
        11: "千",
        12: "万",
        13: "十",
        14: "百",
        15: "千",
        16: "兆"
    }
    # Processing parameters.
    number = str(number)
    # Replace digit.
    for digit, digit_ch in map_digit.items():
        number = number.replace(digit, digit_ch)
    # Add digits.
    # Walk from the least significant digit, interleaving unit characters.
    number_list = []
    for index, digit_ch in enumerate(number[::-1]):
        digits_ch = map_digits[index]
        number_list.insert(0, digits_ch)
        number_list.insert(0, digit_ch)
    number = "".join(number_list)
    # Delete redundant content.
    # Drop units after zeros, collapse zero runs, and strip zeros before
    # the large-group units.
    number = sub_batch(
        number,
        ("(?<=零)[^万亿兆]", ""),
        ("零+", "零"),
        ("零(?=[万亿兆])", "")
    )
    # "一十X" reads as "十X"; a trailing zero is silent.
    if number[0:2] == "一十":
        number = number[1:]
    if number[-1:] == "零":
        number = number[:-1]
    return number
from typing import Any, Dict, Iterable, Optional, Union
from smtplib import SMTP
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from .rbase import get_first_notnull
class REmail(object):
    """
    Rey's `E-mail` type.
    """

    def __init__(
        self,
        email_username: str,
        email_password: str,
        display_from_email: Optional[str] = None
    ) -> None:
        """
        `Set` E-mail attribute.

        Parameters
        ----------
        email_username : E-mail user name (full address; its domain also
            determines the SMTP host used for sending).
        email_password : E-mail password.
        display_from_email : Displayed from E-mail.
            - `None` : Not set.
            - `str` : Set this value.
        """
        self.email_username = email_username
        self.email_password = email_password
        self.display_from_email = display_from_email

    @staticmethod
    def _first_not_none(*values: Any) -> Any:
        """Return the first value that is not None, or None if all are."""
        for value in values:
            if value is not None:
                return value
        return None

    def create_email(
        self,
        text: Optional[str] = None,
        title: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable[str]]] = None,
        display_cc_email: Optional[Union[str, Iterable[str]]] = None
    ) -> str:
        """
        `Create` string in E-mail format.

        Parameters
        ----------
        text : E-mail text.
        title : E-mail title.
        attachment : E-mail attachment.
            - `Dict[str, str]` : File name and path.
            - `Dict[str, bytes]` : File name and stream.
        display_from_email : Displayed from E-mail.
            - `None` : Fall back to the instance default, then the user name.
        display_to_email : Displayed to E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        display_cc_email : Displayed cc E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.

        Returns
        -------
        String in E-mail format.
        """
        # Get parameters by priority.
        display_from_email = self._first_not_none(display_from_email, self.display_from_email, self.email_username)
        # Create E-mail object.
        mime = MIMEMultipart()
        if title is not None:
            mime["subject"] = title
        if text is not None:
            mime.attach(MIMEText(text))
        if attachment is not None:
            for file_name, file_data in attachment.items():
                # A str value is a path; read the raw bytes from disk.
                if file_data.__class__ == str:
                    with open(file_data, "rb") as file:
                        file_data = file.read()
                # MIMEText cannot carry bytes (the original crashed here for
                # path attachments read in binary mode); MIMEApplication
                # base64-encodes arbitrary binary payloads safely.
                mime_file = MIMEApplication(file_data)
                mime_file.add_header("content-disposition", "attachment", filename=file_name)
                mime.attach(mime_file)
        if display_from_email is not None:
            mime["from"] = display_from_email
        if display_to_email is not None:
            if display_to_email.__class__ == str:
                mime["to"] = display_to_email
            else:
                mime["to"] = ",".join(display_to_email)
        if display_cc_email is not None:
            if display_cc_email.__class__ == str:
                mime["cc"] = display_cc_email
            else:
                mime["cc"] = ",".join(display_cc_email)
        # Create string in E-mail format.
        return mime.as_string()

    def send_email(
        self,
        to_email: Union[str, Iterable[str]],
        text: Optional[str] = None,
        title: Optional[str] = None,
        attachment: Optional[Dict[str, Union[str, bytes]]] = None,
        cc_email: Optional[Union[str, Iterable[str]]] = None,
        display_from_email: Optional[str] = None,
        display_to_email: Optional[Union[str, Iterable[str]]] = None,
        display_cc_email: Optional[Union[str, Iterable[str]]] = None
    ) -> None:
        """
        `Send` E-mail.

        Parameters
        ----------
        to_email : To E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        text : E-mail text.
        title : E-mail title.
        attachment : E-mail attachment.
            - `Dict[str, str]` : File name and path.
            - `Dict[str, bytes]` : File name and stream.
        cc_email : Cc E-mail.
            - `str` : Set this value.
            - `Iterable[str]` : Set multiple values.
        display_from_email : Displayed from E-mail.
        display_to_email : Displayed to E-mail.
        display_cc_email : Displayed cc E-mail.
        """
        # Get parameters by priority.
        display_from_email = self._first_not_none(display_from_email, self.display_from_email, self.email_username)
        display_to_email = self._first_not_none(display_to_email, to_email)
        display_cc_email = self._first_not_none(display_cc_email, cc_email)
        # Handle parameters.
        # Copy, so the caller's list is never mutated by the cc merge below.
        if to_email.__class__ == str:
            recipients = [to_email]
        else:
            recipients = list(to_email)
        if cc_email is not None:
            if cc_email.__class__ == str:
                recipients.append(cc_email)
            else:
                recipients.extend(cc_email)
        # Create string in E-mail format.
        # The original passed (title, text) positionally into the
        # (text, title, ...) signature, swapping subject and body;
        # keyword arguments fix the swap permanently.
        email_str = self.create_email(
            text=text,
            title=title,
            attachment=attachment,
            display_from_email=display_from_email,
            display_to_email=display_to_email,
            display_cc_email=display_cc_email
        )
        # Send E-mail via the SMTP server of the user name's domain.
        server_domain_name = self.email_username.split("@")[-1]
        server_host = "smtp." + server_domain_name
        server_port = 25
        smtp = SMTP(server_host, server_port)
        try:
            smtp.login(self.email_username, self.email_password)
            smtp.sendmail(self.email_username, recipients, email_str)
        finally:
            # Always release the connection, even when login/send fails.
            smtp.quit()
from typing import Any, List, Literal, Optional
from pprint import pformat as pprint_pformat
from urwid import old_str_util
from .rbase import get_first_notnull, get_name
from .rmonkey import pprint_modify_format_width_judgment
from . import roption
# Based on module pprint.pformat, modify the chinese width judgment.
pprint_modify_format_width_judgment()
def split_text(text: str, man_len: int, by_width: bool = False) -> List[str]:
    """
    `Split` text into pieces of at most `man_len` characters, or of display
    width not greater than `man_len` when `by_width` is set.

    Parameters
    ----------
    text : Text.
    man_len : Max length.
    by_width : Whether by char displayed width count length.

    Returns
    -------
    Split text.
    """
    if by_width:
        # Accumulate characters until adding one more would exceed the width.
        pieces: List[str] = []
        buffer: List[str] = []
        buffer_width = 0
        for char in text:
            char_width = get_width(char)
            buffer_width += char_width
            if buffer_width > man_len:
                pieces.append("".join(buffer))
                buffer = [char]
                buffer_width = char_width
            else:
                buffer.append(char)
        pieces.append("".join(buffer))
        return pieces
    # Fixed-size slicing by character count.
    return [text[index:index + man_len] for index in range(0, len(text), man_len)]
def get_width(text: str) -> int:
    """
    `Get` text `display width`.

    Parameters
    ----------
    text : Text.

    Returns
    -------
    Text display width.
    """
    # Sum per-character cell widths reported by urwid's width table
    # (wide characters such as CJK presumably count as 2 -- see urwid docs).
    total_width = 0
    for char in text:
        char_unicode = ord(char)
        char_width = old_str_util.get_width(char_unicode)
        total_width += char_width
    return total_width
def fill_width(text: str, char: str, width: int, align: Literal["left", "right", "center"] = "right") -> str:
    """
    Text `fill` character by `display width`.

    Parameters
    ----------
    text : Fill text.
    char : Fill character; must have display width 1.
    width : Fill width.
    align : Align orientation.
        - `Literal[`left`]` : Fill right, align left.
        - `Literal[`right`]` : Fill left, align right.
        - `Literal[`center`]` : Fill both sides, align center.

    Returns
    -------
    Text after fill.
    """
    # Only single-cell characters pad evenly.
    if get_width(char) != 1:
        raise ValueError("parameter `char` value error")
    # Text already as wide as requested is returned untouched.
    pad = width - get_width(text)
    if pad <= 0:
        return text
    if align == "left":
        return char * pad + text
    if align == "right":
        return text + char * pad
    if align == "center":
        left_pad = pad // 2
        return char * left_pad + text + char * (pad - left_pad)
    raise ValueError("parameter `align` value error")
def print_frame(
    *contents: Any,
    title: Optional[str],
    width: int,
    frame: Literal["full", "half", "plain"]
) -> None:
    """
    `Print` contents and frame.

    Parameters
    ----------
    contents : Print contents.
    title : Print frame title.
        - `None` : No title.
        - `str` : Use this value as the title.
    width : Print frame width.
    frame : Frame type.
        - `Literal[`full`]` : Build with symbol `═╡╞─║╟╢╔╗╚╝`, and content not can exceed the frame.
            When throw `exception`, then frame is `half` type.
        - `Literal[`half`]` : Build with symbol `═╡╞─`, and content can exceed the frame.
        - `Literal[`plain`]` : Build with symbol `=|-`, and content can exceed the frame.
    """
    # Handle parameters.
    # Drop titles that cannot fit inside the frame decorations.
    if title is None or len(title) > width - 6:
        title = ""
    # Generate frame.
    ## Full type.
    if frame == "full":
        if title != "":
            title = f"╡ {title} ╞"
        width_in = width - 2
        _contents = []
        try:
            # Wrap every line to the inner width and box it with `║`.
            for content in contents:
                content_str = str(content)
                pieces_str = content_str.split("\n")
                content_str = [
                    "║%s║" % fill_width(line_str, " ", width_in)
                    for piece_str in pieces_str
                    for line_str in split_text(piece_str, width_in, True)
                ]
                content = "\n".join(content_str)
                _contents.append(content)
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit;
        # only genuine formatting errors should demote to the half style.
        except Exception:
            frame_top = fill_width(title, "═", width, "center")
            frame_split = "─" * width
            frame_bottom = "═" * width
            _contents = contents
        else:
            frame_top = "╔%s╗" % fill_width(title, "═", width_in, "center")
            frame_split = "╟%s╢" % ("─" * width_in)
            frame_bottom = "╚%s╝" % ("═" * width_in)
    ## Half type.
    elif frame == "half":
        if title != "":
            title = f"╡ {title} ╞"
        frame_top = fill_width(title, "═", width, "center")
        frame_split = "─" * width
        frame_bottom = "═" * width
        _contents = contents
    ## Plain type.
    elif frame == "plain":
        if title != "":
            title = f"| {title} |"
        frame_top = fill_width(title, "=", width, "center")
        frame_split = "-" * width
        frame_bottom = "=" * width
        _contents = contents
    # Print contents, separated by the split rule.
    print(frame_top)
    for index, content in enumerate(_contents):
        if index != 0:
            print(frame_split)
        print(content)
    print(frame_bottom)
def rprint(
    *contents: Any,
    title: Optional[str] = None,
    width: Optional[int] = None,
    frame: Optional[Literal["full", "half", "plain"]] = "full",
    format: bool = True
) -> None:
    """
    `Print` formatted contents.

    Parameters
    ----------
    contents : Print contents.
    title : Print frame title.
        - `None` : Try to derive a title from the caller's argument names.
        - `str` : Use this value as the title.
    width : Print frame width.
        - `None` : Use `print_width` of module `roption`.
        - `int` : Use this value.
    frame : Frame type.
        - `Literal["full"]` : Use `print_frame_full` of module `roption`.
            Build with symbols `═╡╞─║╟╢╔╗╚╝`; content cannot exceed the frame.
            If rendering raises an exception, the `half` style is used.
        - `Literal["half"]` : Use `print_frame_half` of module `roption`.
            Build with symbols `═╡╞─`; content can exceed the frame.
        - `Literal["plain"]` : Use `print_frame_plain` of module `roption`.
            Build with symbols `=|-`; content can exceed the frame.
    format : Whether to pretty-format contents of type list, tuple, dict or set.
    """
    # Resolve effective width: an explicit argument wins over the option.
    width = get_first_notnull(width, roption.print_width)

    # Derive a title from the caller's argument names when none was given.
    # Names starting with a backtick are literals, not names - blank them.
    if title is None:
        names = get_name(contents)
        if names is not None:
            names = ["" if name[:1] == "`" else name for name in names]
            if set(names) != {""}:
                title = " │ ".join(names)

    # Map the requested frame type onto the configured frame style.
    if frame == "full":
        frame = roption.print_frame_full
    elif frame == "half":
        frame = roption.print_frame_half
    elif frame == "plain":
        frame = roption.print_frame_plain

    # Pretty-format container contents to fit inside the frame.
    if format:
        inner_width = width - 2 if frame == "full" else width
        formatted = []
        for content in contents:
            if content.__class__ in (list, tuple, dict, set):
                content = pprint_pformat(content, width=inner_width, sort_dicts=False)
            formatted.append(content)
        contents = formatted

    # Print.
    print_frame(*contents, title=title, width=width, frame=frame)
# General idea
* Wheels only
* Only support Python 3 rez installs, but must support installing Python 2 wheels
* Use [standalone pip](https://pip.pypa.io/en/stable/installation/#standalone-zip-application) (and make sure we have an automatic update mechanism). It will be bundled I guess?
Or we have a bootstrap command that would create a rez package for the pip zipapp.
* We need to fix the "scripts wrapper" shebang problem once and for all. This could be solved by using https://pypi.org/project/installer/.
Using pip zipapp, we can have a standalone pip that will work across multiple python versions and installs.
This simplifies the process quite a lot. No more "shared" pip install, no more "where is pip and which one
to use?" and no more "which python will be used?".
Latest pip version has a `--dry-run` flag that can be used in combination with `--report` to
output a JSON report of the packages that would be installed.
For example,
```
python pip.pyz install -q requests --dry-run --ignore-installed --python-version 2.7 --only-binary=:all: --target /tmp/asd --report -
```
outputs something like:
<details>
<summary>Output</summary>
```json
{
"version": "0",
"pip_version": "22.3.1",
"install": [
{
"download_info": {
"url": "https://files.pythonhosted.org/packages/29/c1/24814557f1d22c56d50280771a17307e6bf87b70727d975fd6b2ce6b014a/requests-2.25.1-py2.py3-none-any.whl",
"archive_info": {
"hash": "sha256=c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"
}
},
"is_direct": false,
"requested": true,
"metadata": {
"metadata_version": "2.1",
"name": "requests",
"version": "2.25.1",
"platform": [
"UNKNOWN"
],
"summary": "Python HTTP for Humans.",
"description_content_type": "text/markdown",
"home_page": "https://requests.readthedocs.io",
"author": "Kenneth Reitz",
"author_email": "me@kennethreitz.org",
"license": "Apache 2.0",
"classifier": [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
],
"requires_dist": [
"chardet (<5,>=3.0.2)",
"idna (<3,>=2.5)",
"urllib3 (<1.27,>=1.21.1)",
"certifi (>=2017.4.17)",
"pyOpenSSL (>=0.14) ; extra == 'security'",
"cryptography (>=1.3.4) ; extra == 'security'",
"PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'",
"win-inet-pton ; (sys_platform == \"win32\" and python_version == \"2.7\") and extra == 'socks'"
],
"requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
"project_url": [
"Documentation, https://requests.readthedocs.io",
"Source, https://github.com/psf/requests"
],
"provides_extra": [
"security",
"socks"
],
"description": ""
}
},
{
"download_info": {
"url": "https://files.pythonhosted.org/packages/37/45/946c02767aabb873146011e665728b680884cd8fe70dde973c640e45b775/certifi-2021.10.8-py2.py3-none-any.whl",
"archive_info": {
"hash": "sha256=d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"
}
},
"is_direct": false,
"requested": false,
"metadata": {
"metadata_version": "2.1",
"name": "certifi",
"version": "2021.10.8",
"platform": [
"UNKNOWN"
],
"summary": "Python package for providing Mozilla's CA Bundle.",
"home_page": "https://certifiio.readthedocs.io/en/latest/",
"author": "Kenneth Reitz",
"author_email": "me@kennethreitz.com",
"license": "MPL-2.0",
"classifier": [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9"
],
"project_url": [
"Documentation, https://certifiio.readthedocs.io/en/latest/",
"Source, https://github.com/certifi/python-certifi"
],
"description": ""
}
},
{
"download_info": {
"url": "https://files.pythonhosted.org/packages/19/c7/fa589626997dd07bd87d9269342ccb74b1720384a4d739a1872bd84fbe68/chardet-4.0.0-py2.py3-none-any.whl",
"archive_info": {
"hash": "sha256=f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"
}
},
"is_direct": false,
"requested": false,
"metadata": {
"metadata_version": "2.1",
"name": "chardet",
"version": "4.0.0",
"platform": [
"UNKNOWN"
],
"summary": "Universal encoding detector for Python 2 and 3",
"keywords": [
"encoding",
"i18n",
"xml"
],
"home_page": "https://github.com/chardet/chardet",
"author": "Mark Pilgrim",
"author_email": "mark@diveintomark.org",
"maintainer": "Daniel Blanchard",
"maintainer_email": "dan.blanchard@gmail.com",
"license": "LGPL",
"classifier": [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic"
],
"requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
"description": ""
}
},
{
"download_info": {
"url": "https://files.pythonhosted.org/packages/a2/38/928ddce2273eaa564f6f50de919327bf3a00f091b5baba8dfa9460f3a8a8/idna-2.10-py2.py3-none-any.whl",
"archive_info": {
"hash": "sha256=b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
}
},
"is_direct": false,
"requested": false,
"metadata": {
"metadata_version": "2.1",
"name": "idna",
"version": "2.10",
"platform": [
"UNKNOWN"
],
"summary": "Internationalized Domain Names in Applications (IDNA)",
"home_page": "https://github.com/kjd/idna",
"author": "Kim Davies",
"author_email": "kim@cynosure.com.au",
"license": "BSD-like",
"classifier": [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Internet :: Name Service (DNS)",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities"
],
"requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
"description": ""
}
},
{
"download_info": {
"url": "https://files.pythonhosted.org/packages/65/0c/cc6644eaa594585e5875f46f3c83ee8762b647b51fc5b0fb253a242df2dc/urllib3-1.26.13-py2.py3-none-any.whl",
"archive_info": {
"hash": "sha256=47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"
}
},
"is_direct": false,
"requested": false,
"metadata": {
"metadata_version": "2.1",
"name": "urllib3",
"version": "1.26.13",
"summary": "HTTP library with thread-safe connection pooling, file post, and more.",
"description_content_type": "text/x-rst",
"keywords": [
"urllib",
"httplib",
"threadsafe",
"filepost",
"http",
"https",
"ssl",
"pooling"
],
"home_page": "https://urllib3.readthedocs.io/",
"author": "Andrey Petrov",
"author_email": "andrey.petrov@shazow.net",
"license": "MIT",
"classifier": [
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries"
],
"requires_dist": [
"brotlicffi (>=0.8.0) ; ((os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\") and extra == 'brotli'",
"brotli (>=1.0.9) ; ((os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation == \"CPython\") and extra == 'brotli'",
"brotlipy (>=0.6.0) ; (os_name == \"nt\" and python_version < \"3\") and extra == 'brotli'",
"pyOpenSSL (>=0.14) ; extra == 'secure'",
"cryptography (>=1.3.4) ; extra == 'secure'",
"idna (>=2.0.0) ; extra == 'secure'",
"certifi ; extra == 'secure'",
"urllib3-secure-extra ; extra == 'secure'",
"ipaddress ; (python_version == \"2.7\") and extra == 'secure'",
"PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'"
],
"requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*",
"project_url": [
"Documentation, https://urllib3.readthedocs.io/",
"Code, https://github.com/urllib3/urllib3",
"Issue tracker, https://github.com/urllib3/urllib3/issues"
],
"provides_extra": [
"brotli",
"secure",
"socks"
],
"description": ""
}
}
],
"environment": {
"implementation_name": "cpython",
"implementation_version": "3.10.8",
"os_name": "posix",
"platform_machine": "x86_64",
"platform_release": "6.0.8-arch1-1",
"platform_system": "Linux",
"platform_version": "#1 SMP PREEMPT_DYNAMIC Thu, 10 Nov 2022 21:14:24 +0000",
"python_full_version": "3.10.8",
"platform_python_implementation": "CPython",
"python_version": "3.10",
"sys_platform": "linux"
}
}
```
</details>
I'm still not sure if we should use `--dry-run` and manually download the wheels + install them using https://pypi.org/project/installer/
or if we should just `pip install --target`. Using `installer` would allow us to control
how the "scripts wrappers" are constructed and managed.
| /rez_pip-0.2.0.tar.gz/rez_pip-0.2.0/NOTES.md | 0.647575 | 0.787523 | NOTES.md | pypi |
[](https://github.com/AcademySoftwareFoundation/rez/releases)
[](https://pypi.org/project/rez)<br>
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Acore+branch%3Amaster)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Aubuntu+branch%3Amaster)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Amac+branch%3Amaster)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3AWindows+branch%3Amaster)<br>
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Ainstallation+branch%3Amaster)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Aflake8+branch%3Amaster)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Awiki+event%3Arelease)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Apypi+event%3Arelease)
[](https://github.com/AcademySoftwareFoundation/rez/actions?query=workflow%3Abenchmark+event%3Arelease)<br>
[](https://sonarcloud.io/summary/new_code?id=nerdvegas_rez)
[](https://sonarcloud.io/summary/new_code?id=nerdvegas_rez)
[](https://sonarcloud.io/summary/new_code?id=nerdvegas_rez)
[](https://sonarcloud.io/summary/new_code?id=nerdvegas_rez)
[](https://sonarcloud.io/summary/new_code?id=nerdvegas_rez)
[](https://sonarcloud.io/summary/new_code?id=nerdvegas_rez)
- [What Is Rez?](#what-is-rez)
- [The Basics](#the-basics)
- [Examples](#examples)
- [Quickstart](#quickstart)
- [Building Your First Package](#building-your-first-package)
- [Features](#features)
## What Is Rez?
Rez is a cross-platform package manager with a difference. Using Rez you can create
standalone environments configured for a given set of packages. However, unlike many
other package managers, packages are not installed into these standalone environments.
Instead, all package versions are installed into a central repository, and standalone
environments reference these existing packages. This means that configured environments
are lightweight, and very fast to create, often taking just a few seconds to configure
despite containing hundreds of packages.
See [the wiki](https://github.com/AcademySoftwareFoundation/rez/wiki) for full documentation.
<p align="center">
<a href="https://github.com/AcademySoftwareFoundation/rez/wiki/media/other_pkg_mgr.png">
<img src="https://github.com/AcademySoftwareFoundation/rez/wiki/media/other_pkg_mgr.png"></a>
<br><i>Typical package managers install packages into an environment</i>
</p>
<br>
<p align="center">
<a href="https://github.com/AcademySoftwareFoundation/rez/wiki/media/rez_pkg_mgr.png">
<img src="https://github.com/AcademySoftwareFoundation/rez/wiki/media/rez_pkg_mgr.png"></a>
<br><i>Rez installs packages once, and configures environments dynamically</i>
</p>
<br>
Rez takes a list of package requests, and constructs the target environment, resolving
all the necessary package dependencies. Any type of software package is supported -
compiled, python, applications and libraries.
## The Basics
Packages are stored in repositories on disk. Each package has a single concise
definition file (*package.py*) that defines its dependencies, its commands (how it
configures the environment containing it), and other metadata. For example, the
following is the package definition file for the popular *requests* python module:
name = "requests"
version = "2.8.1"
authors = ["Kenneth Reitz"]
requires = [
"python-2.7+"
]
def commands():
env.PYTHONPATH.append("{root}/python")
This package requires python-2.7 or greater. When used, the 'python' subdirectory
within its install location is appended to the PYTHONPATH environment variable.
When an environment is created with the rez API or *rez-env* tool, a dependency
resolution algorithm tracks package requirements and resolves to a list of needed
packages. The commands from these packages are concatenated and evaluated, resulting
in a configured environment. Rez is able to configure environments containing
hundreds of packages, often within a few seconds. Resolves can also be saved to file,
and when re-evaluated later will reconstruct the same environment once more.
## Examples
This example places the user into a resolved shell containing the requested packages,
using the [rez-env](https://github.com/AcademySoftwareFoundation/rez/wiki/Command-Line-Tools#rez-env) tool:
]$ rez-env requests-2.2+ python-2.6 'pymongo-0+<2.7'
You are now in a rez-configured environment.
resolved by ajohns@nn188.somewhere.com, on Wed Feb 26 15:56:20 2014, using Rez v2.0.0
requested packages:
requests-2.2+
python-2.6
pymongo-0+<2.7
resolved packages:
python-2.6.8 /software/ext/python/2.6.8
platform-linux /software/ext/platform/linux
requests-2.2.1 /software/ext/requests/2.2.1/python-2.6
pymongo-2.6.3 /software/ext/pymongo/2.6.3
arch-x86_64 /software/ext/arch/x86_64
> ]$ _
This example creates an environment containing the package 'houdini' version 12.5
or greater, and runs the command 'hescape -h' inside that environment:
]$ rez-env houdini-12.5+ -- hescape -h
Usage: hescape [-foreground] [-s editor] [filename ...]
-h: output this usage message
-s: specify starting desktop by name
-foreground: starts process in foreground
Resolved environments can also be created via the API:
>>> import subprocess
>>> from rez.resolved_context import ResolvedContext
>>>
>>> r = ResolvedContext(["houdini-12.5+", "houdini-0+<13", "java", "!java-1.8+"])
>>> p = r.execute_shell(command='which hescape', stdout=subprocess.PIPE)
>>> out, err = p.communicate()
>>>
>>> print(out)
'/software/ext/houdini/12.5.562/bin/hescape'
## Quickstart
First, install Rez. Download the source, and from the source directory, run
(with DEST_DIR replaced with your install location):
]$ python ./install.py -v DEST_DIR
This installs the Rez command line tools. It will print a message at the end
telling you how to use Rez when the installation has completed. Rez is not a
normal Python package and so you do not typically install it with pip or setup.py.
Do *not* move the installation - re-install to a new location if you want to
change the install path. If you want to install rez for multiple operating
systems, perform separate installs for each of those systems.
Next, you need to create some essential Rez packages. The *rez-bind* tool creates
Rez packages that are based on software already installed on your system. Try
binding the following list of packages (note that for Python, you may need
administrative privileges):
]$ rez-bind platform
]$ rez-bind arch
]$ rez-bind os
]$ rez-bind python
Now you should be able to create an environment containing Python. Try this:
]$ rez-env python -- which python
/home/ajohns/packages/python-2.7.8/platform-linux/arch-x86_64/os-Ubuntu-12.04/bin/python
## Building Your First Package
The *rez-build* tool is used to build packages and install them locally (typically
to *$HOME/packages*). Once you've done that, you can use them via *rez-env*, just
like any other package:
]$ cd example_packages/hello_world
]$ rez-build --install
...
]$ rez-env hello_world -- hello
Hello world!
## Features
* Supports Linux, OSX and Windows;
* Allows for a fast and efficient build-install-test cycle;
* Creates shells of type: bash, tcsh, other (shells can be added as plugins);
* Contains a deployment system supporting git, mercurial and svn (as plugins);
* Environment resolves can be saved to disk and reused at a later date (a bit
like VirtualEnv);
* Highly pluggable, supports five different plugin types to do things from
adding new shell types, to adding new build systems;
* Contains a version resolving algorithm, for avoiding version clashes;
* Visualises resolved environments in a rendered dot-graph;
* Packages are found in a search path, so different packages can be deployed
to different locations;
* Supports alphanumeric version numbers;
* Has a powerful version requirements syntax, able to describe any version
range, and a conflict operator for rejecting version ranges;
* Package 'variants' - a way to define different flavors of the same package
version, for example a plugin built for multiple versions of the host app;
* Custom release hooks (such as post-release operations) can be added as plugins;
* Has a time lock feature, which allows old resolves to be recreated (newer
packages are ignored);
* Package definitions are a single, succinct file;
* Packages define their effect on the environment (adding to PATH etc) in a
platform- and shell- agnostic way, using a dedicated python API;
* Has a memcached-based caching system, for caching environment resolves;
* Has a package filtering feature, allowing for staged package releases such as
alpha and beta packages.
## Known issues and limitations
* Currently CMake builds do not function on Windows with Rez and
the related tests are skipped. A fix requires multiple changes that are on
the roadmap. Users have successfully implemented workarounds to utilize
CMake with Rez under Windows, but the goal is to provide a seamless experience
on any platform in the future. For details see this [issue](/../../issues/703)
| /rez-2.112.0.tar.gz/rez-2.112.0/README.md | 0.687735 | 0.799951 | README.md | pypi |
from Qt import QtCompat, QtCore, QtWidgets, QtGui
from rezgui.util import update_font, create_pane, interp_color
from rezgui.widgets.EffectivePackageCellWidget import EffectivePackageCellWidget
from rezgui.widgets.PackageSelectWidget import PackageSelectWidget
from rezgui.widgets.VariantCellWidget import VariantCellWidget
from rezgui.widgets.IconButton import IconButton
from rezgui.mixins.ContextViewMixin import ContextViewMixin
from rezgui.models.ContextModel import ContextModel
from rezgui.objects.App import app
from rez.packages import iter_packages
from rez.vendor.version.requirement import Requirement
from rez.vendor.version.version import VersionRange
from functools import partial
import os.path
class CompareCell(QtWidgets.QWidget):
    """Table cell comparing a variant from the current resolve (left) against
    the matching variant from a reference resolve (right).

    Attributes set by the constructor:
        side: "left", "right" or "both", depending on which variants exist.
        mode: Comparison outcome - one of "equal_to", "equalish",
            "greater_than", "less_than", "missing" or "new".
        comparable: True when the two variants can be meaningfully diffed
            (right variant visible in the package search path AND versions
            differ).
        color: Background `QColor` used to tint neighbouring cells, or None.
    """
    def __init__(self, context_model, variant_left=None, variant_right=None,
                 parent=None):
        """Create the comparison cell.

        Args:
            context_model (`ContextModel`): Model of the current context;
                its `packages_path` determines which packages are visible.
            variant_left: Variant from the current resolve, or None.
            variant_right: Variant from the reference resolve, or None.
            parent (`QWidget`): Parent widget.
        """
        super(CompareCell, self).__init__(parent)
        self.context_model = context_model
        self.left_variant = variant_left
        self.right_variant = variant_right
        self.color = None
        self.side = None
        self.mode = None
        self.comparable = False
        package_paths = self.context_model.packages_path
        widget = None
        # Both sides present: classify the relationship between the versions.
        if self.left_variant and self.right_variant:
            self.side = "both"
            equal_versions = (self.left_variant.version == self.right_variant.version)
            right_variant_visible = (self.right_variant.wrapped.location in package_paths)
            self.comparable = right_variant_visible and not equal_versions
            if self.comparable:
                # determine how far apart the variant versions are, by
                # counting the released versions spanning the two
                versions = sorted([self.left_variant.version,
                                   self.right_variant.version])
                range_ = VersionRange.as_span(*versions)
                it = iter_packages(name=self.left_variant.name,
                                   paths=package_paths, range_=range_)
                diff_num = sum(1 for x in it) - 1
                unit = "version" if diff_num == 1 else "versions"
                # icon variants exist for 1/2/3 versions apart; "" otherwise
                icon_suffixes = {1: "_1", 2: "_2", 3: "_3"}
                icon_suffix = icon_suffixes.get(diff_num, "")
            if self.left_variant == self.right_variant:
                self.mode = "equal_to"
                self._set_color(0.7, 0.7, 0.7)
                widget = IconButton("equal_to", "packages are equal")
            elif self.left_variant.version == self.right_variant.version:
                # same version, but package is different. This can happen when
                # a local package is released which then hides a central package
                # of the same version
                self.mode = "equalish"
                self._set_color(1, 1, 0)
                widget = IconButton(
                    "equalish", "packages versions are equal, but package is different")
            elif self.left_variant.version > self.right_variant.version:
                self.mode = "greater_than"
                self._set_color(0, 1, 0)
                if self.comparable:
                    desc = "package is %d %s ahead" % (diff_num, unit)
                    widget = IconButton("greater_than" + icon_suffix, desc)
                else:
                    widget = IconButton("greater_than", "package is newer")
            else:
                self.mode = "less_than"
                self._set_color(1, 0, 0)
                if self.comparable:
                    desc = "package is %d %s behind" % (diff_num, unit)
                    widget = IconButton("less_than" + icon_suffix, desc)
                else:
                    widget = IconButton("less_than", "package is older")
        # Only the reference side present: package was removed from resolve.
        elif self.right_variant:
            self.side = "right"
            self.mode = "missing"
            self._set_color(1, 0, 0)
            widget = IconButton("missing", "package is missing")
        # Only the current side present: package was added to resolve.
        elif self.left_variant:
            self.side = "left"
            self.mode = "new"
            self._set_color(0, 1, 0)
            widget = IconButton("new", "package is new")
        # Centre the icon in the cell; clicking it shows the comparison detail.
        if widget:
            create_pane([None, widget, None], True, compact=True,
                        parent_widget=self)
            widget.clicked.connect(self._clicked)
    def left(self):
        """Return True if a variant from the current resolve is present."""
        return (self.side in ("left", "both"))
    def right(self):
        """Return True if a variant from the reference resolve is present."""
        return (self.side in ("right", "both"))
    def _clicked(self):
        """Show detail for the comparison when the cell's icon is clicked.

        Comparable variants open a versions dialog; every other mode shows
        an explanatory message box.
        """
        if self.comparable:
            # import here, presumably to avoid a circular import - TODO confirm
            from rezgui.dialogs.VariantVersionsDialog import VariantVersionsDialog
            dlg = VariantVersionsDialog(self.context_model, self.left_variant,
                                        reference_variant=self.right_variant,
                                        parent=self)
            dlg.exec_()
        elif self.mode == "equal_to":
            QtWidgets.QMessageBox.information(
                self,
                "Equal Package",
                "The packages are equal")
        elif self.mode == "equalish":
            QtWidgets.QMessageBox.information(
                self,
                "Equal Version Package",
                "The package in the current resolve:\n(%s)\n\nis the same "
                "version as the package in the reference resolve:\n(%s)\n\n"
                "but is a different package."
                % (self.left_variant.uri, self.right_variant.uri))
        elif self.mode == "missing":
            QtWidgets.QMessageBox.information(
                self,
                "Missing Package",
                "The package is present in the reference resolve only")
        elif self.mode == "new":
            QtWidgets.QMessageBox.information(
                self,
                "New Package",
                "The package is present in the current resolve only")
        elif self.mode == "greater_than":
            QtWidgets.QMessageBox.information(
                self,
                "Newer Package",
                "The package in the current resolve:\n(%s)\n\nis newer than "
                "the package in the reference resolve (%s)"
                % (self.left_variant.uri, self.right_variant.uri))
        else:
            QtWidgets.QMessageBox.information(
                self,
                "Older Package",
                "The package in the current resolve:\n(%s)\n\nis older than "
                "the package in the reference resolve (%s)"
                % (self.left_variant.uri, self.right_variant.uri))
    def _set_color(self, *c):
        """Blend RGB components `c` (each in [0, 1]) with the palette base
        color (80% base, 20% given) and store the result in `self.color`."""
        f = 0.8
        col = self.palette().color(QtGui.QPalette.Active, QtGui.QPalette.Base)
        bg_c = (col.redF(), col.greenF(), col.blueF())
        bg_c = [x * f for x in bg_c]
        c = [x * (1 - f) for x in c]
        c = [x + y for x, y in zip(bg_c, c)]
        self.color = QtGui.QColor.fromRgbF(*c)
class CellDelegate(QtWidgets.QStyledItemDelegate):
    """Custom delegate that paints the context table's cell backgrounds,
    grid lines, and the curved connectors in the comparison column (2).

    A "stale" context (see `ContextModel.is_stale`) is outlined with a
    thicker orange pen.
    """
    def __init__(self, parent=None):
        """Build the pens, connector path and highlight gradient once.

        Args:
            parent: The owning table widget; its palette supplies colors.
        """
        super(CellDelegate, self).__init__(parent)
        pal = self.parent().palette()
        col = pal.color(QtGui.QPalette.Active, QtGui.QPalette.Button)
        self.pen = QtGui.QPen(col)
        self.stale_color = QtGui.QColor("orange")
        self.stale_pen = QtGui.QPen(self.stale_color)
        self.stale_pen.setWidth(2)
        # cosmetic pens keep a constant on-screen width under the scaling
        # transforms applied in paint()
        self.pen.setCosmetic(True)
        self.stale_pen.setCosmetic(True)
        # S-shaped curve in unit coordinates; scaled to cell size when drawn
        self.path = QtGui.QPainterPath()
        self.path.moveTo(0, 0)
        self.path.cubicTo(0.6, 0, -0.2, 0.5, 1, 0.5)
        self.path.cubicTo(-0.2, 0.5, 0.6, 1, 0, 1)
        # vertical gradient blending the palette highlight into the base,
        # used as the selection background
        highlight_color = pal.color(QtGui.QPalette.Highlight)
        base_color = pal.color(QtGui.QPalette.Base)
        c1 = interp_color(highlight_color, base_color, 0.3)
        c2 = interp_color(highlight_color, base_color, 0.8)
        grad = QtGui.QLinearGradient(0, 1, 0, 0)
        grad.setCoordinateMode(QtGui.QGradient.ObjectBoundingMode)
        grad.setColorAt(0, c1)
        grad.setColorAt(0.95, c2)
        grad.setColorAt(1, c1)
        self.highlight_brush = QtGui.QBrush(grad)
    def paint(self, painter, option, index):
        """Paint one cell: background fill, grid lines, and (in column 2)
        the curved connectors tinted by the row's `CompareCell` widget."""
        row = index.row()
        column = index.column()
        table = self.parent()
        # comparison widget for this row lives in column 2, if any
        cmp_widget = table.cellWidget(row, 2)
        stale = table.context_model.is_stale()
        rect = option.rect
        # remember painter state; restored at the end of this method
        oldbrush = painter.brush()
        oldpen = painter.pen()
        pal = table.palette()
        def _setpen(to_stale):
            # use the orange stale pen only when both stale and requested
            pen = self.stale_pen if stale and to_stale else self.pen
            painter.setPen(pen)
        # determine cell bg color and paint it
        selected_cells = set((x.row(), x.column()) for x in table.selectedIndexes())
        bg_color = None
        if (row, column) in selected_cells:
            bg_color = self.highlight_brush
        elif cmp_widget and \
                ((cmp_widget.left() and column == 1)
                 or (cmp_widget.right() and column == 3)):
            # tint the variant cells with the comparison result color
            bg_color = cmp_widget.color
        else:
            bg_color = pal.color(QtGui.QPalette.Base)
        painter.fillRect(rect, bg_color)
        # draw grid lines
        r = (rect.topRight(), rect.bottomRight())
        b = (rect.bottomLeft(), rect.bottomRight() - QtCore.QPoint(1, 0))
        _setpen(column < 2)
        if column == 0:
            painter.drawLine(*r)
            _setpen(False)
            painter.drawLine(*b)
        elif column == 1:
            if not cmp_widget or not cmp_widget.left():
                painter.drawLine(*r)
            if row == table.rowCount() - 1:
                painter.drawLine(*b)
            else:
                if stale and row == 0:
                    painter.drawLine(rect.topLeft(), rect.topRight())
            _setpen(False)
            painter.drawLine(*b)
        elif column == 2:
            # draw the curvy bits in the comparison column
            draw_right_edge = True
            def _draw_path():
                # draw the unit-space path under the current transform,
                # then reset the transform for subsequent drawing
                painter.setRenderHints(QtGui.QPainter.Antialiasing, True)
                painter.drawPath(self.path)
                painter.resetTransform()
                painter.setRenderHints(QtGui.QPainter.Antialiasing, False)
            if cmp_widget:
                if cmp_widget.left():
                    # NOTE(review): QPoint is integer-based - the 0.5 here
                    # (and below) may be truncated or rejected depending on
                    # the Qt binding; QPointF may have been intended. Confirm.
                    painter.translate(rect.topLeft() - QtCore.QPoint(1, 0.5))
                    painter.scale(rect.width() / 2.5, rect.height())
                    _setpen(True)
                    if stale:
                        # thinner non-cached stale pen for the curve outline
                        pen = QtGui.QPen(self.stale_color)
                        pen.setCosmetic(True)
                        pen.setWidthF(1.5)
                        painter.setPen(pen)
                    if (row, 1) in selected_cells:
                        painter.setBrush(self.highlight_brush)
                    elif cmp_widget.color:
                        painter.setBrush(QtGui.QBrush(cmp_widget.color))
                    _draw_path()
                    _setpen(False)
                if cmp_widget.right():
                    # mirror horizontally by scaling with a negative width
                    painter.translate(rect.topRight() - QtCore.QPoint(-1, 0.5))
                    painter.scale(-rect.width() / 2.5, rect.height())
                    if (row, 3) in selected_cells:
                        painter.setBrush(self.highlight_brush)
                    elif cmp_widget.color:
                        painter.setBrush(QtGui.QBrush(cmp_widget.color))
                    _draw_path()
                    draw_right_edge = False
            if draw_right_edge:
                painter.drawLine(*r)
        else:
            painter.drawLine(*r)
            painter.drawLine(*b)
        # restore painter state
        painter.setPen(oldpen)
        painter.setBrush(oldbrush)
        # repaint the comparison cell so the connectors stay in sync with
        # the variant cells just painted
        if cmp_widget and column in (1, 3):
            index = table.model().index(row, 2)
            table.update(index)
class ContextTableWidget(QtWidgets.QTableWidget, ContextViewMixin):
    """Table showing a resolved context: request column, resolve column and,
    in diff mode, a comparison column plus the reference context's columns."""
    # number of rows shown before any context is loaded
    default_row_count = 10
    # arrow glyphs used when titling a diff view
    double_arrow = u"\u27FA"
    short_double_arrow = u"\u21D4"
    # emitted when the selected variant changes
    variantSelected = QtCore.Signal(object)
    def __init__(self, context_model=None, parent=None):
        """Create a context table."""
        super(ContextTableWidget, self).__init__(self.default_row_count,
                                                 2, parent)
        ContextViewMixin.__init__(self, context_model)
        # diff-mode state; see enter_diff_mode()/leave_diff_mode()
        self.diff_mode = False
        self.diff_context_model = None
        self.diff_from_source = False
        self._show_effective_request = False
        self._current_variant = None
        self.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
        hh = self.horizontalHeader()
        hh.setDefaultSectionSize(12 * self.fontMetrics().height())
        vh = self.verticalHeader()
        QtCompat.QHeaderView.setSectionResizeMode(
            vh, QtWidgets.QHeaderView.ResizeToContents)
        vh.setVisible(False)
        # custom delegate paints backgrounds, grid and diff connectors
        self.delegate = CellDelegate(self)
        self.setItemDelegate(self.delegate)
        self.setShowGrid(False)
        self.currentCellChanged.connect(self._currentCellChanged)
        self.itemSelectionChanged.connect(self._itemSelectionChanged)
        self.refresh()
def selectionCommand(self, index, event=None):
row = index.row()
column = index.column()
widget = self.cellWidget(row, column)
if self._widget_is_selectable(widget):
return QtCore.QItemSelectionModel.ClearAndSelect
else:
return QtCore.QItemSelectionModel.Clear
def current_variant(self):
"""Returns the currently selected variant, if any."""
return self._current_variant
def show_effective_request(self, b):
if b != self._show_effective_request:
self._show_effective_request = b
self._update_request_column(0, self.context_model)
if self.diff_mode:
self._update_request_column(4, self.diff_context_model)
def get_request(self):
"""Get the current request list.
Returns:
List of strings.
"""
return self._get_request(0)
def enter_diff_mode(self, context_model=None):
"""Enter diff mode.
Args:
context_model (`ContextModel`): Context to diff against. If None, a
copy of the current context is used.
"""
assert not self.diff_mode
self.diff_mode = True
if context_model is None:
self.diff_from_source = True
self.diff_context_model = self.context_model.copy()
else:
self.diff_from_source = False
self.diff_context_model = context_model
self.clear()
self.setColumnCount(5)
self.refresh()
def leave_diff_mode(self):
"""Leave diff mode."""
assert self.diff_mode
self.diff_mode = False
self.diff_context_model = None
self.diff_from_source = False
self.setColumnCount(2)
self.refresh()
def revert_to_diff(self):
assert self.diff_mode
source_context = self.diff_context_model.context()
self.context_model.set_context(source_context)
def revert_to_disk(self):
filepath = self.context_model.filepath()
assert filepath
disk_context = app.load_context(filepath)
self.context_model.set_context(disk_context)
def get_title(self):
"""Returns a string suitable for titling a window containing this table."""
def _title(context_model):
context = context_model.context()
if context is None:
return "new context*"
title = os.path.basename(context.load_path) if context.load_path \
else "new context"
if context_model.is_modified():
title += '*'
return title
if self.diff_mode:
diff_title = _title(self.diff_context_model)
if self.diff_from_source:
diff_title += "'"
return "%s %s %s" % (_title(self.context_model),
self.short_double_arrow, diff_title)
else:
return _title(self.context_model)
# Stops focus loss when a widget inside the table is selected. In an MDI app
# this can cause the current subwindow to lose focus.
def clear(self):
self.setFocus()
super(ContextTableWidget, self).clear()
def select_variant(self, name):
for row, widget in self._iter_column_widgets(1, VariantCellWidget):
if widget.variant.name == str(name):
self.setCurrentIndex(self.model().index(row, 1))
return
def refresh(self):
self._contextChanged(ContextModel.CONTEXT_CHANGED)
def _contextChanged(self, flags=0):
update_request_columns = {}
# apply request and variant widgets to columns
if flags & ContextModel.CONTEXT_CHANGED:
self.clear()
if self.diff_mode:
hh = self.horizontalHeader()
QtCompat.QHeaderView.setSectionResizeMode(
hh, 2, QtWidgets.QHeaderView.Fixed)
self.setColumnWidth(2, 50)
if self.context():
if self.diff_mode:
self._apply_request(self.diff_context_model, 4)
self._apply_resolve(self.diff_context_model, 3, 4,
hide_locks=True, read_only=True)
self._apply_request(self.context_model, 0)
self._apply_resolve(self.context_model, 1, 3,
reference_column_is_variants=True)
self._update_comparison_column(2)
update_request_columns[4] = self.diff_context_model
else:
self._apply_request(self.context_model, 0)
self._apply_resolve(self.context_model, 1, 0)
else:
self._set_package_cell(0, 0)
update_request_columns[0] = self.context_model
if flags & ContextModel.LOCKS_CHANGED and self._show_effective_request:
update_request_columns[0] = self.context_model
for column, context_model in update_request_columns.items():
self._update_request_column(column, context_model)
# set column headers
if self.diff_mode:
headers = [["current request", False],
["current resolve", False],
[self.double_arrow, False],
["reference resolve", True],
["reference request", True]]
else:
headers = [["request", False],
["resolve", False]]
if self.context_model.is_stale():
headers[0][0] += '*'
headers[1][0] += " (stale)"
headers[1][1] = True
for column, (label, italic) in enumerate(headers):
item = QtWidgets.QTableWidgetItem(label)
update_font(item, italic=italic)
self.setHorizontalHeaderItem(column, item)
self.update()
def _update_request_column(self, column, context_model):
# remove effective request cells
for row, widget in self._iter_column_widgets(column, EffectivePackageCellWidget):
self.removeCellWidget(row, column)
# update effective request cells
if self._show_effective_request:
# get row following package select widgets
last_row = -1
for row, widget in self._iter_column_widgets(column, PackageSelectWidget):
last_row = row
row = last_row + 1
for request_str in context_model.implicit_packages:
self._set_effective_package_cell(row, column, request_str, "implicit")
row += 1
d = context_model.get_lock_requests()
for lock, requests in d.items():
for request in requests:
request_str = str(request)
self._set_effective_package_cell(row, column, request_str, lock.name)
row += 1
self._trim_trailing_rows()
def _widget_is_selectable(self, widget):
return (widget
and widget.isEnabled()
and isinstance(widget, VariantCellWidget)
and not widget.read_only)
def _currentCellChanged(self, currentRow, currentColumn,
previousRow, previousColumn):
widget = self.cellWidget(currentRow, currentColumn)
if self._widget_is_selectable(widget):
self._current_variant = widget.variant
else:
self._current_variant = None
self.setCurrentIndex(QtCore.QModelIndex())
# update other variants, this causes them to show/hide the depends icon
if previousColumn != currentColumn:
for _, widget in self._iter_column_widgets(previousColumn, VariantCellWidget):
widget.set_reference_sibling(None)
for _, widget in self._iter_column_widgets(currentColumn, VariantCellWidget):
widget.set_reference_sibling(self._current_variant)
# new selection is failing to cause a paint update sometimes?? This
# seems to help but does not 100% fix the problem.
self.update(self.model().index(previousRow, previousColumn))
self.update(self.model().index(currentRow, currentColumn))
self.variantSelected.emit(self._current_variant)
# this is only here to clear the current index, which leaves an annoying
# visual cue even though the cell is not selected
def _itemSelectionChanged(self):
if not self.selectedIndexes():
self.setCurrentIndex(QtCore.QModelIndex())
def _iter_column_widgets(self, column, types=None):
types = types or QtWidgets.QWidget
for row in range(self.rowCount()):
widget = self.cellWidget(row, column)
if widget and isinstance(widget, types):
yield row, widget
def _get_request(self, column):
request_strs = []
for _, edit in self._iter_column_widgets(column, PackageSelectWidget):
txt = str(edit.text()).strip()
if txt:
request_strs.append(txt)
return request_strs
def _apply_request(self, context_model, column):
context = context_model.context()
requests = context.requested_packages()
num_requests = len(requests)
for i, request in enumerate(requests):
self._set_package_cell(i, column, request)
self._set_package_cell(num_requests, column)
def _apply_resolve(self, context_model, column, reference_column,
hide_locks=False, read_only=False,
reference_column_is_variants=False):
context = context_model.context()
resolved = context.resolved_packages[:]
consumed_rows = set()
# match variants up with matching request/variant in source column
for row, widget in self._iter_column_widgets(
reference_column, (PackageSelectWidget, VariantCellWidget)):
request_str = str(widget.text())
if not request_str:
continue
package_name = Requirement(request_str).name
matches = [x for x in resolved if x.name == package_name]
if matches:
variant = matches[0]
resolved = [x for x in resolved if x.name != package_name]
reference_variant = None
if reference_column_is_variants and isinstance(widget, VariantCellWidget):
reference_variant = widget.variant
self._set_variant_cell(row, column, context_model, variant,
reference_variant=reference_variant,
hide_locks=hide_locks, read_only=read_only)
consumed_rows.add(row)
# append variants that don't match reference requests/variants
if reference_column_is_variants:
hide_locks = True
row = 0
while resolved:
variant = resolved[0]
resolved = resolved[1:]
while row in consumed_rows:
row += 1
self._set_variant_cell(row, column, context_model, variant,
hide_locks=hide_locks, read_only=read_only)
row += 1
def _update_comparison_column(self, column):
#no_color = self.palette().color(QtGui.QPalette.Active, QtGui.QPalette.Base)
for row in range(self.rowCount()):
left = self.cellWidget(row, column - 1)
right = self.cellWidget(row, column + 1)
left_variant = left.variant if left else None
right_variant = right.variant if right else None
if left_variant or right_variant:
widget = CompareCell(self.context_model, left_variant, right_variant)
self.setCellWidget(row, column, widget)
def _set_package_cell(self, row, column, request=None):
if row >= self.rowCount():
self.setRowCount(row + 1)
if request is None:
# don't overwrite existing package request
widget = self.cellWidget(row, column)
if widget and isinstance(widget, PackageSelectWidget):
return None
txt = str(request) if request else ""
read_only = (column != 0)
edit = PackageSelectWidget(self.context_model, read_only=read_only)
edit.setText(txt)
self.setCellWidget(row, column, edit)
edit.textChanged.connect(partial(self._packageTextChanged, row, column))
edit.focusOut.connect(partial(self._packageFocusOut, row, column))
edit.focusOutViaKeyPress.connect(partial(self._packageFocusOutViaKeyPress,
row, column))
return edit
def _set_effective_package_cell(self, row, column, request, lock_type):
if row >= self.rowCount():
self.setRowCount(row + 1)
cell = EffectivePackageCellWidget(request, lock_type)
self.setCellWidget(row, column, cell)
def _set_variant_cell(self, row, column, context_model, variant,
reference_variant=None, hide_locks=False,
read_only=False):
if row >= self.rowCount():
self.setRowCount(row + 1)
widget = VariantCellWidget(context_model, variant,
reference_variant=reference_variant,
hide_locks=hide_locks, read_only=read_only)
self.setCellWidget(row, column, widget)
widget._set_stale(column != 1)
def _set_cell_text(self, row, column, txt):
if row >= self.rowCount():
self.setRowCount(row + 1)
if self.cellWidget(row, column):
self.removeCellWidget(row, column)
item = QtWidgets.QTableWidgetItem(txt)
self.setItem(row, column, item)
def _packageTextChanged(self, row, column, txt):
if txt:
if self._set_package_cell(row + 1, column):
self._update_request_column(column, self.context_model)
def _packageFocusOutViaKeyPress(self, row, column, txt):
if txt:
self._set_current_cell(row + 1, column)
else:
widget = self.cellWidget(row + 1, column)
if widget and isinstance(widget, PackageSelectWidget):
self._delete_cell(row, column)
new_request = self.get_request()
self.context_model.set_request(new_request)
self._update_request_column(column, self.context_model)
def _packageFocusOut(self, row, column, txt):
if txt:
self._set_package_cell(row + 1, column)
else:
widget = self.cellWidget(row + 1, column)
if widget and isinstance(widget, PackageSelectWidget):
self._delete_cell(row, column)
new_request = self.get_request()
self.context_model.set_request(new_request)
self._update_request_column(column, self.context_model)
def _delete_cell(self, row, column):
for i in range(row, self.rowCount()):
edit = self.cellWidget(i, column)
if edit and isinstance(edit, PackageSelectWidget):
next_edit = self.cellWidget(i + 1, column)
if next_edit and isinstance(next_edit, PackageSelectWidget):
next_edit.clone_into(edit)
else:
self.removeCellWidget(i, column)
def _trim_trailing_rows(self):
n = 0
for i in reversed(range(self.default_row_count, self.rowCount())):
row_clear = not any(self.cellWidget(i, x)
for x in range(self.columnCount()))
if row_clear:
n += 1
else:
break
if n:
row, column = self.currentRow(), self.currentColumn()
self.setRowCount(self.rowCount() - n)
self._set_current_cell(row, column)
def _set_current_cell(self, row, column):
self.setCurrentCell(row, column)
edit = self.cellWidget(row, column)
if edit:
edit.setFocus() | /rez-2.112.0.tar.gz/rez-2.112.0/src/rezgui/widgets/ContextTableWidget.py | 0.650134 | 0.157752 | ContextTableWidget.py | pypi |
from Qt import QtCore, QtWidgets, QtGui
from rezgui.util import update_font, create_pane
from rez.utils.formatting import readable_time_duration
import math
class Canvas(QtWidgets.QWidget):
    """Interactive area for picking a time duration ("N seconds ago").

    The canvas is split into four horizontal bands - days, hours, minutes and
    seconds. The cursor's vertical position selects the unit, and its
    horizontal position selects (non-linearly) the magnitude.
    """

    # emitted with the hovered duration in seconds, or -1 when the cursor
    # leaves the canvas
    secondsHover = QtCore.Signal(int)
    # emitted with the clicked duration in seconds
    secondsClicked = QtCore.Signal(int)

    def __init__(self, width, height, parent=None):
        super(Canvas, self).__init__(parent)
        self.setCursor(QtCore.Qt.CrossCursor)
        # required so mouseMoveEvent fires without a button held down
        self.setMouseTracking(True)
        self._width = width
        self._height = height

    def paintEvent(self, event):
        rect = self.rect()
        w = rect.width()
        h = rect.height()
        margin = 5
        # Band height. Bugfix: use floor division - this code predates
        # python 3, where '/' on ints yields a float that integer-only Qt
        # drawing overloads (drawLine/drawText) reject.
        j = h // 4
        p = QtGui.QPainter(self)
        update_font(p, italic=True)
        pal = QtGui.QPalette()
        bg_brush = pal.brush(QtGui.QPalette.Active, QtGui.QPalette.Light)
        p.fillRect(rect, bg_brush)
        # dotted separators between the four unit bands
        p.setPen(QtCore.Qt.DotLine)
        p.drawLine(0, j, w, j)
        p.drawLine(0, j * 2, w, j * 2)
        p.drawLine(0, j * 3, w, j * 3)
        # band labels, drawn just above each band's lower edge
        p.setPen(pal.color(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText))
        p.drawText(margin, j - margin, "days")
        p.drawText(margin, j * 2 - margin, "hours")
        p.drawText(margin, j * 3 - margin, "minutes")
        p.drawText(margin, j * 4 - margin, "seconds")

    def leaveEvent(self, event):
        # -1 signals 'no time hovered'
        self.secondsHover.emit(-1)

    def mousePressEvent(self, event):
        secs = self._get_seconds(event.pos())
        self.secondsClicked.emit(secs)

    def mouseMoveEvent(self, event):
        secs = self._get_seconds(event.pos())
        self.secondsHover.emit(secs)

    def sizeHint(self):
        return QtCore.QSize(self._width, self._height)

    def _get_seconds(self, pos):
        """Map a cursor position to a duration in seconds.

        The y-position picks the unit band (0=seconds .. 3=days, counted
        from the bottom); within a band, the x-position is raised to a power
        that varies with the cursor's height in the band, giving finer
        control towards the left edge.
        """
        rect = self.rect()
        x_norm = pos.x() / float(rect.width())
        # invert y so 0.0 is the bottom of the canvas; clamp below 1.0
        y_norm = min(1.0 - (pos.y() / float(rect.height())), 0.99)
        unit = int(y_norm / 0.25)
        # re-normalize y within the selected band to [0, 1)
        y_norm -= unit * 0.25
        y_norm *= 4.0
        x_norm = max(min(x_norm, 0.99), 0.0)
        y_norm = max(min(y_norm, 0.99), 0.0)
        # exponent varies from 0.5 (top of band) to 3.0 (bottom of band)
        j = 2.5 * (1.0 - y_norm)
        x_pow = 0.5 + (j * j / 2.5)
        f = math.pow(x_norm, x_pow)
        if unit == 0:  # seconds
            j = int(1.0 + f * 59)
            secs = min(j, 59)
        elif unit == 1:  # minutes
            j = int((1.0 + f * 60) * 60)
            secs = min(j, 3600)
        elif unit == 2:  # hours
            j = int((1.0 + f * 24) * 3600)
            secs = min(j, 3600 * 24)
        else:  # days
            j = int((1.0 + f * 7) * 3600 * 24)
            secs = min(j, 3600 * 24 * 7)
        return secs
class TimeSelecterPopup(QtWidgets.QFrame):
    """Popup frame housing a `Canvas` for picking a "time ago" duration.

    The popup positions itself directly above `pivot_widget` and closes once
    a duration is clicked.
    """

    # emitted with the chosen duration in seconds
    secondsClicked = QtCore.Signal(int)

    def __init__(self, pivot_widget, width=240, height=160, parent=None):
        super(TimeSelecterPopup, self).__init__(parent)
        self.setFrameStyle(QtWidgets.QFrame.Panel | QtWidgets.QFrame.Raised)
        self.setWindowFlags(QtCore.Qt.Popup)
        self.seconds = None
        # shows the currently hovered duration, e.g. "5 minutes ago"
        self.label = QtWidgets.QLabel("")
        canvas_frame = QtWidgets.QFrame()
        canvas_frame.setFrameStyle(QtWidgets.QFrame.Panel | QtWidgets.QFrame.Sunken)
        canvas = Canvas(width, height)
        layout = QtWidgets.QVBoxLayout()
        layout.setSpacing(2)
        layout.setContentsMargins(2, 2, 2, 2)
        layout.addWidget(canvas)
        canvas_frame.setLayout(layout)
        create_pane([self.label, canvas_frame], False, compact=True,
                    parent_widget=self)
        self.adjustSize()
        # position the popup so its bottom edge sits on top of the pivot
        pt = pivot_widget.rect().topLeft()
        global_pt = pivot_widget.mapToGlobal(pt)
        self.move(global_pt - QtCore.QPoint(0, self.height()))
        canvas.secondsHover.connect(self._secondsHover)
        canvas.secondsClicked.connect(self._secondsClicked)

    def _secondsHover(self, seconds):
        # -1 means the cursor left the canvas (see Canvas.leaveEvent)
        if seconds == -1:
            self.label.setText("")
        else:
            secs_txt = readable_time_duration(seconds)
            self.label.setText("%s ago" % secs_txt)

    def _secondsClicked(self, seconds):
        # forward the choice and dismiss the popup
        self.secondsClicked.emit(seconds)
        self.close()
from Qt import QtCore, QtWidgets, QtGui
from rezgui.util import create_pane
class GraphicsView(QtWidgets.QGraphicsView):
    """Graphics view with drag-to-pan and wheel zoom.

    Panning/zooming can be disabled by setting `interactive` to False, in
    which case all mouse events are ignored (propagated to the parent).
    """

    def __init__(self, parent=None, max_scale=None):
        # NOTE(review): callers may pass a QGraphicsScene as `parent`
        # (QGraphicsView offers a (scene, parent) constructor overload) -
        # see ImageViewerWidget, which does exactly that.
        super(GraphicsView, self).__init__(parent)
        self.interactive = True
        self.press_pos = None
        # upper zoom limit (x-scale factor); None means unlimited
        self.max_scale = max_scale

    def mousePressEvent(self, event):
        if self.interactive:
            self.setCursor(QtCore.Qt.ClosedHandCursor)
            # remember drag origin in both screen and scrollbar space
            self.press_pos = QtGui.QCursor.pos()
            self.press_scroll_pos = self._scroll_pos()
        else:
            event.ignore()

    def mouseReleaseEvent(self, event):
        if self.interactive:
            self.unsetCursor()
        else:
            event.ignore()

    def mouseMoveEvent(self, event):
        # assumes a press preceded the move while interactive
        # (press_pos/press_scroll_pos are only set in mousePressEvent)
        if self.interactive:
            pos = QtGui.QCursor.pos()
            pos_delta = pos - self.press_pos
            # pan: drag right moves content right, so scroll left
            scroll_pos = self.press_scroll_pos - pos_delta
            self._set_scroll_pos(scroll_pos)
        else:
            event.ignore()

    def wheelEvent(self, event):
        if self.interactive:
            # NOTE(review): QWheelEvent.delta() is a Qt4-era API; Qt5
            # bindings expose angleDelta() instead - confirm the Qt shim
            # in use provides delta()
            scale = 1.0 + (event.delta() * 0.001)
            if scale < 1.0:
                rect = self.mapToScene(self.rect()).boundingRect()
                if rect.width() > self.scene().width() \
                        and rect.height() > self.scene().height():
                    # all of image visible in viewport
                    event.ignore()
                    return
            elif self.max_scale and self.transform().m11() > self.max_scale:
                # we're zoomed in really close
                event.ignore()
                return
            self.scale(scale, scale)
        else:
            event.ignore()

    def _scroll_pos(self):
        # current scrollbar values packed into a point
        hs = self.horizontalScrollBar()
        vs = self.verticalScrollBar()
        return QtCore.QPoint(hs.value(), vs.value())

    def _set_scroll_pos(self, pos):
        hs = self.horizontalScrollBar()
        vs = self.verticalScrollBar()
        hs.setValue(pos.x())
        vs.setValue(pos.y())
class ImageViewerWidget(QtWidgets.QWidget):
    """Displays an image with pan/zoom and a fit-to-window mode."""

    def __init__(self, image_file, parent=None):
        super(ImageViewerWidget, self).__init__(parent)
        self.fit = False
        # zoom level to restore when leaving fit-to-window mode
        self.prev_scale = 1.0
        self.scene = QtWidgets.QGraphicsScene()
        image = QtGui.QPixmap(image_file)
        self.image_item = self.scene.addPixmap(image)
        self.image_item.setTransformationMode(QtCore.Qt.SmoothTransformation)
        # cap zoom-in relative to the image's largest dimension
        npix = max(image.width(), image.height())
        max_scale = npix / 200.0
        self.view = GraphicsView(self.scene, max_scale=max_scale)
        create_pane([self.view], False, parent_widget=self)
        self.view.setRenderHints(QtGui.QPainter.Antialiasing
                                 | QtGui.QPainter.SmoothPixmapTransform)
        self.view.show()
        self._fit_in_view()

    def resizeEvent(self, event):
        # keep the image fitted while in fit-to-window mode
        if self.fit:
            self._fit_in_view()
        event.accept()

    def fit_to_window(self, enabled):
        """Toggle fit-to-window mode, preserving the manual zoom level."""
        if enabled != self.fit:
            self.fit = enabled
            # pan/zoom is disabled while fitted
            self.view.interactive = not enabled
            current_scale = self.view.transform().m11()
            if self.fit:
                self.prev_scale = current_scale
                self._fit_in_view()
            else:
                # restore the zoom level from before fitting
                factor = self.prev_scale / current_scale
                self.view.scale(factor, factor)

    def _fit_in_view(self):
        self.view.fitInView(self.image_item, QtCore.Qt.KeepAspectRatio)
from Qt import QtCore, QtWidgets
from functools import partial
class Config(QtCore.QSettings):
    """Persistent application settings.

    Methods are also provided for easily attaching widgets to settings.
    """
    def __init__(self, default_settings, organization=None, application=None,
                 parent=None):
        super(Config, self).__init__(organization, application, parent)
        # nested dict of fallback values, addressed by '/'-separated keys
        self.default_settings = default_settings

    def value(self, key, type_=None):
        """Get the value of a setting.

        If `type` is not provided, the key must be for a known setting,
        present in `self.default_settings`. Conversely if `type` IS provided,
        the key must be for an unknown setting.
        """
        if type_ is None:
            default = self._default_value(key)
            val = self._value(key, default)
            # QSettings may hand back a string; coerce to the default's type
            if type(val) == type(default):
                return val
            else:
                return self._convert_value(val, type(default))
        else:
            val = self._value(key, None)
            if val is None:
                return None
            return self._convert_value(val, type_)

    def get(self, key, type_=None):
        # convenience alias for value()
        return self.value(key, type_)

    def get_string_list(self, key):
        """Get a list of strings."""
        strings = []
        size = self.beginReadArray(key)
        for i in range(size):
            self.setArrayIndex(i)
            entry = str(self._value("entry"))
            strings.append(entry)
        self.endArray()
        return strings

    def prepend_string_list(self, key, value, max_length_key):
        """Prepend a fixed-length string list with a new string.

        The oldest string will be removed from the list. If the string is
        already in the list, it is shuffled to the top. Use this to implement
        things like a 'most recent files' entry.
        """
        max_len = self.get(max_length_key)
        strings = self.get_string_list(key)
        # move-to-front, dropping any existing occurrence of `value`
        strings = [value] + [x for x in strings if x != value]
        strings = strings[:max_len]
        self.beginWriteArray(key)
        for i in range(len(strings)):
            self.setArrayIndex(i)
            self.setValue("entry", strings[i])
        self.endArray()

    def attach(self, widget, key):
        """Two-way bind a widget's state to the setting at `key`."""
        if isinstance(widget, QtWidgets.QComboBox):
            self._attach_combobox(widget, key)
        elif isinstance(widget, QtWidgets.QCheckBox):
            self._attach_checkbox(widget, key)
        else:
            raise NotImplementedError

    def _value(self, key, defaultValue=None):
        val = super(Config, self).value(key, defaultValue)
        # PyQt4-style bindings return a QVariant; unwrap to a python object
        if hasattr(val, "toPyObject"):
            val = val.toPyObject()
        return val

    @classmethod
    def _convert_value(cls, value, type_):
        # QSettings stores booleans as the strings 'true'/'false'
        if type_ is bool:
            return (str(value).lower() == "true")
        else:
            return type_(value)

    def _attach_checkbox(self, widget, key):
        if widget.isTristate():
            raise NotImplementedError
        value = self.value(key)
        widget.setCheckState(QtCore.Qt.Checked if value else QtCore.Qt.Unchecked)
        widget.stateChanged.connect(
            partial(self._checkbox_stateChanged, widget, key))

    def _checkbox_stateChanged(self, widget, key):
        value = widget.isChecked()
        self.setValue(key, value)

    def _attach_combobox(self, widget, key):
        value = str(self.value(key))
        index = widget.findText(value)
        if index == -1:
            # stored value isn't one of the items; show it as edit text
            widget.setEditText(value)
        else:
            widget.setCurrentIndex(index)
        widget.currentIndexChanged.connect(
            partial(self._combobox_currentIndexChanged, widget, key))
        widget.editTextChanged.connect(
            partial(self._combobox_editTextChanged, widget, key))

    def _combobox_currentIndexChanged(self, widget, key, index):
        value = widget.itemText(index)
        self.setValue(key, value)

    def _combobox_editTextChanged(self, widget, key, txt):
        self.setValue(key, txt)

    def _default_value(self, key):
        # walk default_settings along the '/'-separated key path
        keys = key.lstrip('/').split('/')
        value = self.default_settings
        for k in keys:
            try:
                value = value[k]
            except KeyError:
                raise ValueError("No such application setting: %r" % key)
        return value
from __future__ import print_function
from rez.release_hook import ReleaseHook
from rez.utils.logging_ import print_error, print_debug
from rez.utils.amqp import publish_message
from rez.vendor.six import six
from rez.config import config
basestring = six.string_types[0]  # py2/py3 compatibility alias (str on py3)
class AmqpReleaseHook(ReleaseHook):
    """
    Publishes a message to the broker.

    The message is a json encoded dictionary of the form -
        {
            package : {
                handle : {},
                name : ...
                version : ...
                user: ... (who released the package)
                qualified_name : ...
                uri : ...
            },
            variants : [
                { handle : {} },
                { handle : {} }
            ]
        }
    """

    # plugin settings schema (configured under plugins.release_hook.amqp)
    schema_dict = {
        "host": basestring,
        "userid": basestring,
        "password": basestring,
        "connect_timeout": int,
        "exchange_name": basestring,
        "exchange_routing_key": basestring,
        "message_delivery_mode": int,
        "message_attributes": dict}

    @classmethod
    def name(cls):
        # plugin name, as referenced in rez config
        return "amqp"

    def __init__(self, source_path):
        super(AmqpReleaseHook, self).__init__(source_path)

    def post_release(self, user, install_path, variants, **kwargs):
        """Build the release message and publish it after a release."""
        if variants:
            # all variants share the same parent package
            package = variants[0].parent
        else:
            package = self.package
        # build the message dict
        data = {}
        data["package"] = dict(
            name=package.name,
            version=str(package.version),
            qualified_name=package.qualified_name,
            uri=package.uri,
            handle=package.handle.to_dict())
        # FIXME Do this until user added as package attribute
        from getpass import getuser
        data["package"]["user"] = getuser()
        data["variants"] = []
        for variant in variants:
            variants_data = dict(handle=variant.handle.to_dict())
            data["variants"].append(variants_data)
        # add message attributes
        data.update(self.settings.message_attributes)
        self.publish_message(data)

    def publish_message(self, data):
        """Publish `data` to the configured AMQP exchange.

        A no-op (with an error printed) if no broker host is configured.
        """
        if not self.settings.host:
            print_error("Did not publish message, host is not specified")
            return
        routing_key = self.settings.exchange_routing_key
        print("Publishing AMQP message on %s..." % routing_key)
        publish_message(
            host=self.settings.host,
            amqp_settings=self.settings,
            routing_key=routing_key,
            data=data
        )
        if config.debug("package_release"):
            print_debug("Published message: %s" % (data))
def register_plugin():
    # rez plugin entry point: expose the hook class to the plugin manager
    return AmqpReleaseHook
from __future__ import print_function
from rez.release_vcs import ReleaseVCS
from rez.utils.logging_ import print_error, print_debug
from rez.exceptions import ReleaseVCSError
from shutil import rmtree
import functools
import os.path
import re
class GitReleaseVCSError(ReleaseVCSError):
    """Error raised by the git release VCS plugin."""
    pass
class GitReleaseVCS(ReleaseVCS):
    """Git implementation of the release VCS interface.

    Validates the repository state before a release, records/exports
    revision information, and writes the release tag.
    """

    # plugin settings schema
    schema_dict = {
        "allow_no_upstream": bool}

    @classmethod
    def name(cls):
        return 'git'

    def __init__(self, pkg_root, vcs_root=None):
        super(GitReleaseVCS, self).__init__(pkg_root, vcs_root=vcs_root)
        self.executable = self.find_executable('git')
        try:
            # cheap sanity check that vcs_root is inside a git repository
            self.git("rev-parse")
        except ReleaseVCSError:
            raise GitReleaseVCSError("%s is not a git repository" %
                                     self.vcs_root)

    @classmethod
    def is_valid_root(cls, path):
        return os.path.isdir(os.path.join(path, '.git'))

    @classmethod
    def search_parents_for_root(cls):
        return True

    def git(self, *nargs):
        """Run a git command; returns its output as a list of lines."""
        return self._cmd(self.executable, *nargs)

    def get_relative_to_remote(self):
        """Return the number of commits we are relative to the remote. Negative
        is behind, positive in front, zero means we are matched to remote.
        """
        s = self.git("status", "--short", "-b")[0]
        # the branch line ends with eg '[ahead 2]' / '[behind 1]' if unsynced
        r = re.compile(r"\[([^\]]+)\]")
        toks = r.findall(s)
        if toks:
            try:
                s2 = toks[-1]
                adj, n = s2.split()
                assert adj in ("ahead", "behind")
                n = int(n)
                return -n if adj == "behind" else n
            except Exception as e:
                raise ReleaseVCSError(
                    ("Problem parsing first line of result of 'git status "
                     "--short -b' (%s):\n%s") % (s, str(e)))
        else:
            return 0

    def get_local_branch(self):
        """Returns the label of the current local branch."""
        return self.git("rev-parse", "--abbrev-ref", "HEAD")[0]

    def get_tracking_branch(self):
        """Returns (remote, branch) tuple, or None,None if there is no remote.
        """
        try:
            remote_uri = self.git("rev-parse", "--abbrev-ref",
                                  "--symbolic-full-name", "@{u}")[0]
            return remote_uri.split('/', 1)
        except Exception as e:
            # capitalization of message changed sometime between git 1.8.3
            # and 2.12 - used to be "No upstream", now "no upstream"..
            errmsg = str(e).lower()
            if "no upstream branch" not in errmsg and \
                    "no upstream configured" not in errmsg and \
                    "does not point to a branch" not in errmsg:
                raise e
        return (None, None)

    def validate_repostate(self):
        """Raise ReleaseVCSError if the working copy is not releasable."""
        # Bugfix: self.git() returns a *list* of output lines (see the [0]
        # indexing in get_local_branch et al), so the previous comparison
        # `b == "true"` could never match and bare repositories were never
        # detected. Compare against the first output line instead.
        b = self.git("rev-parse", "--is-bare-repository")
        if b and b[0].strip() == "true":
            raise ReleaseVCSError("Could not release: bare git repository")
        remote, remote_branch = self.get_tracking_branch()
        # check for upstream branch
        if remote is None and (not self.settings.allow_no_upstream):
            raise ReleaseVCSError(
                "Release cancelled: there is no upstream branch (git cannot see "
                "a remote repo - you should probably FIX THIS FIRST!). To allow "
                "the release, set the config entry "
                "'plugins.release_vcs.git.allow_no_upstream' to true.")
        # check we are releasing from a valid branch
        releasable_branches = self.type_settings.releasable_branches
        if releasable_branches:
            releasable = False
            current_branch_name = self.get_local_branch()
            # entries are regex patterns, not literal branch names
            for releasable_branch in releasable_branches:
                if re.search(releasable_branch, current_branch_name):
                    releasable = True
                    break
            if not releasable:
                raise ReleaseVCSError(
                    "Could not release: current branch is %s, must match "
                    "one of: %s"
                    % (current_branch_name, ', '.join(releasable_branches)))
        # check for uncommitted changes
        try:
            self.git("diff-index", "--quiet", "HEAD")
        except ReleaseVCSError:
            msg = "Could not release: there are uncommitted changes:\n"
            statmsg = self.git("diff-index", "--stat", "HEAD")
            msg += '\n'.join(statmsg)
            raise ReleaseVCSError(msg)
        # check if we are behind/ahead of remote
        if remote:
            self.git("remote", "update")
            n = self.get_relative_to_remote()
            if n:
                s = "ahead of" if n > 0 else "behind"
                remote_uri = '/'.join((remote, remote_branch))
                raise ReleaseVCSError(
                    "Could not release: %d commits %s %s."
                    % (abs(n), s, remote_uri))

    def get_changelog(self, previous_revision=None, max_revisions=None):
        """Return the git log since `previous_revision`, as a single string."""
        prev_commit = None
        if previous_revision is not None:
            try:
                prev_commit = previous_revision["commit"]
            except Exception:  # was a bare 'except:'; don't mask SystemExit
                if self.package.config.debug("package_release"):
                    print_debug("couldn't determine previous commit from: %r"
                                % previous_revision)
        args = ["log"]
        if max_revisions:
            args.extend(["-n", str(max_revisions)])
        if prev_commit:
            # git returns logs to last common ancestor, so even if previous
            # release was from a different branch, this is ok
            commit_range = "%s..HEAD" % prev_commit
            args.append(commit_range)
        stdout = self.git(*args)
        return '\n'.join(stdout)

    def get_current_revision(self):
        """Return a dict describing the current revision - commit hash,
        branch, tracking branch and remote URLs. Failures retrieving the
        optional fields are printed rather than raised.
        """
        doc = dict(commit=self.git("rev-parse", "HEAD")[0])

        def _url(op):
            # extract the fetch/push url of the tracked remote
            origin = doc["tracking_branch"].split('/')[0]
            lines = self.git("remote", "-v")
            lines = [x for x in lines if origin in x.split()]
            lines = [x for x in lines if ("(%s)" % op) in x.split()]
            try:
                return lines[0].split()[1]
            except Exception:  # was a bare 'except:'
                raise ReleaseVCSError("failed to parse %s url from:\n%s"
                                      % (op, '\n'.join(lines)))

        def _get(key, fn):
            # store fn() under doc[key]; report (not raise) failures
            try:
                value = fn()
                doc[key] = value
                return (value is not None)
            except Exception as e:
                print_error("Error retrieving %s: %s" % (key, str(e)))
                return False

        def _tracking_branch():
            remote, remote_branch = self.get_tracking_branch()
            if remote is None:
                return None
            else:
                return "%s/%s" % (remote, remote_branch)

        _get("branch", self.get_local_branch)
        # remote urls only make sense if there is a tracking branch
        if _get("tracking_branch", _tracking_branch):
            _get("fetch_url", functools.partial(_url, "fetch"))
            _get("push_url", functools.partial(_url, "push"))
        return doc

    def tag_exists(self, tag_name):
        """True if `tag_name` already exists in the repository."""
        tags = self.git("tag")
        return (tag_name in tags)

    def create_release_tag(self, tag_name, message=None):
        """Create an annotated tag and push it to the tracked remote.

        A no-op if the tag already exists; the push is skipped if there is
        no tracking remote.
        """
        if self.tag_exists(tag_name):
            return
        # create tag
        print("Creating tag '%s'..." % tag_name)
        args = ["tag", "-a", tag_name]
        args += ["-m", message or '']
        self.git(*args)
        # push tag
        remote, remote_branch = self.get_tracking_branch()
        if remote is None:
            return
        remote_uri = '/'.join((remote, remote_branch))
        print("Pushing tag '%s' to %s..." % (tag_name, remote_uri))
        self.git("push", remote, tag_name)

    @classmethod
    def export(cls, revision, path):
        """Clone the given revision into `path` and strip its .git dir.

        NOTE(review): `retain_cwd` and the `git` module used below are not
        imported at the top of this file - confirm they are provided
        elsewhere, otherwise this method raises NameError when called.
        """
        url = revision["fetch_url"]
        commit = revision["commit"]
        path_, dirname = os.path.split(path)
        gitdir = os.path.join(path, ".git")
        with retain_cwd():
            os.chdir(path_)
            git.clone(url, dirname)
            os.chdir(path)
            git.checkout(commit)
        rmtree(gitdir)
def register_plugin():
    # rez plugin entry point: expose the VCS class to the plugin manager
    return GitReleaseVCS
# Rezbuild
Rezbuild is a python library for build rez packages. Please visit the
[Rez website](https://github.com/nerdvegas/rez) for more information about rez.
点击 [这里](https://gitlab.com/Pili-Pala/rezbuild/-/blob/main/README_zh_CN.md)
访问 Rezbuild 的中文文档。
## Description
This project is a tool to build rez packages. It simplifies the build process.
Rezbuild supports building rez packages from python wheels, python source
archives, python source trees, archive files, unix sources, macOS pkg and
macOS dmg files.
## Installation
### requisites
Rezbuild requires python-3.6+, build-0.3+ (lower versions were not tested) and
pip-18+ to run.
### Install
There are 3 ways to install rezbuild, choose according to your own situation.
#### 1. Install from source with rez (new to rezbuild, or no rezbuild in your environment)
If you are new to rezbuild, or there is no other version of rezbuild in your
rez environment, you can use rez to install this package from source. Make
sure all the requirements are already installed into your rez environment
(python-3.6+, build-0.3+, pip-18+).
Then, clone this project, cd the source root and run the rez install command:
```shell
git clone git@gitlab.com:Pili-Pala/rezbuild.git
cd rezbuild
rez build -i
```
#### 2. Install by itself (needs rezbuild in your rez repository)
Rezbuild can install itself. Make sure all the requirements are installed
into your rez environment, including rezbuild (another version of this
package). Download the wheel file from
[PyPI](https://pypi.org/project/rezbuild/#files). Then create a directory like
this:
```text
install_rezbuild/
├── build.py
├── package.py
└── installers/
└── rezbuild-0.14.1-py3-none-any.whl
```
The content of build.py can be like this:
```python
# build.py
from rezbuild.builder import PythonWheelBuilder
if __name__ == '__main__':
builder = PythonWheelBuilder()
builder.build()
```
The content of package.py can be like this:
```python
# package.py
name = "rezbuild"
version = "version of this package you download"
requires = [
"build-0.3+",
"pip-18+",
"python-3.6+",
]
private_build_requires = [
"rezbuild",
]
build_command = 'python {root}/build.py {install}'
def commands():
env.PYTHONPATH.append("{root}/site-packages")
```
Then, run this command in the root directory `rez build -i`.
After that, this package will be installed as a rez package.
#### 3.Install from pypi
Of course, you can install this package from pip
```shell
pip install rezbuild
```
As this package is for rez, installing from PyPI doesn't make sense in most
cases.
## Usage
After 2.70.0, rez removed the bez build system. So rezbuild based on
rez-2.70.0.
### Build from python wheel file(PythonWheelBuilder)
I assume that you already know what is rez, how to make a package.py, and now
you want to build a third-party python package come from the internet.
First, add a build file into you package root, just like a file named
`build.py`. The content can be like this:
```python
# Import third-party modules
from rezbuild import PythonWheelBuilder
if __name__ == '__main__':
PythonWheelBuilder().build()
```
Then add variable `build_command` into the `package.py` file,
content should be like this:
`build_command = 'python {root}/build.py {install}'`.
After that, go to [PyPI](https://pypi.org) to download the wheel file and put
the file into `source_root/installers`. The tree should like this:
```text
source_root/
├── build.py
├── package.py
└── installers/
└── the_package_you_want_to_install.whl
```
Finally, change directory to source_root and run the command `rez build -i`,
the package will be installed.
### Build from python source code(PythonSourceBuilder)
PythonSourceBuilder is used to build rez package from python source which
meeting the requirements of Python official structure. The source structure
please refer to
[python official website](https://packaging.python.org/en/latest/tutorials/packaging-projects/).
The source structure should like this:
```text
source_root/
├── build.py
├── package.py
├── pyproject.toml
├── setup.cfg
└── src/
└── module/
└── __init__.py
```
The content of `build.py`:
```python
# Import third-party modules
from rezbuild import PythonSourceBuilder
if __name__ == '__main__':
PythonSourceBuilder().build()
```
Then ensure you already make all the necessary files to build a python package.
`PythonSourceBuilder` will use the official way to build the package.
Then run the command `rez build -i`, the package will be build and installed.
### Build from the python source archive file
Some packages only supply the python source archive file, we can use the
PythonSourceArchiveBuilder builder to build.
### Copy build(CopyBuilder)
Sometimes we don't want to use the official way to build rez package(metadata
will be missing if we don't use the official way), but only copy the code. Use
CopyBuilder can build package by only copy the source code. The default source
path is the folder named `src` under the source root. Pass the path to the root
parameter in build method to custom the source path. build.py file should like
this:
```python
# Import built-in modules
import os
# Import third-party modules
from rezbuild import CopyBuilder
if __name__ == '__main__':
builder = CopyBuilder()
builder.build(root=os.path.join(builder.source_path, "example_package"))
```
### Build from Archive file(ExtractBuilder)
ExtractBuilder can extract the archive file to build rez package.
ExtractBuilder now support zip, tar.gz, tar.xz and 7z.exe.
`build.py`:
```python
# Import third-party modules
from rezbuild import ExtractBuilder
if __name__ == '__main__':
ExtractBuilder().build()
```
Put the archive file into `installers` folder.
```text
source_root/
├── build.py
├── package.py
└── installers/
└── archive.zip
```
### Build from source code(CompileBuilder)
CompileBuilder support use `configure` and `make` command to build package
on Linux and macOS. The arguments of configure is passed by the
extra_config_args parameter of CompileBuilder.build method.
`build.py` is as follows:
```python
# Import third-party modules
from rezbuild import CompileBuilder
if __name__ == '__main__':
CompileBuilder().build(extra_config_args=["LDFLAGS=-L/path/to/lib"])
```
Put the source archive file into installers folder.
```text
source_root/
├── build.py
├── package.py
└── installers/
└── git-2.32.0.tar.gz
```
### Build from dmg file(MacOSDmgBuilder)
Make `build.py` like this:
```python
# Import third-party modules
from rezbuild import MacOSDmgBuilder
if __name__ == '__main__':
MacOSDmgBuilder().build()
```
Put archive file into `installers` folder.
```text
source_root/
├── build.py
├── package.py
└── installers/
└── installer.dmg
```
Then run command `rez build -i` from `source_root`.
`MacOSDmgBuilder` will create a shell script in the package root with the same
name as the `.app` folder. Set `create_shell` to `False` to prevent its
creation. For example: `MacOSDmgBuilder().build(create_shell=False)`
### Custom builder
You can customize a builder for you code from base builder. Just make a builder
inherit from RezBuilder and rewrite `custom_build` function. Follow will
introduce all the default builder from rezbuild so that you can use them to
customize you own builder.
#### RezBuilder
`RezBuilder` is the root builder; all the other builders inherit from it. It
reads the rez environment, prepares the workspace, installs the package, and
executes your custom build function.
For example:
```python
# Import built-in modules
import os
import shutil
# Import third-party modules
from rezbuild import RezBuilder
class CustomBuilder(RezBuilder):
def custom_build(self):
shutil.copytree(os.path.join(self.source_path, "src"), self.build_path)
if __name__ == '__main__':
CustomBuilder().build()
```
build function will invoke the custom_build function to build the package.
### Multiple variant
If you need to install a multi-variant package with different installers for
each variant, you can put the installers into the folders that named with the
variant index under the `installers` folder. For example:
```text
git/
|___installers/
|___0
|___PortableGit-2.32.0.2-64-bit.7z.exe
|___1
|___git-2.32.0.tar.xz
|___build.py
|___package.py
```
Rezbuild will get all the installers under the variant folder when building it.
## API
### builder module
### RezBuilder()
`RezBuilder` is the root builder; every other builder inherits from it.
RezBuilder loads the environment variables, prepares the workspace, installs
the package, and executes the custom build method.
### RezBuilder.build_path
Build path. The rez default directory.
### RezBuilder.install_path
Install path. Default is ~/packages.
### RezBuilder.source_path
The source path.
### RezBuilder.workspace
Workspace. All the files and folders will be copied to the installation path.
### RezBuilder.name
Package name.
### RezBuilder.version
Build version.
### RezBuilder.variant_index
Variant index.
### RezBuilder.build(**kwargs) -> None
Build method, trigger the build process. This method will invoke the custom
build method of the subclass to run the build.
kwargs: Accept all the key word arguments to pass to the custom_build method.
## Versioning
We use [SemVer](http://semver.org/) for versioning. For the versions available,
see the [tags on this repository](https://gitlab.com/Pili-Pala/rezbuild/tags).
## Author
[PiliPala](https://gitlab.com/Pili-Pala)
## License
[GPLv3](https://www.gnu.org/licenses/gpl-3.0.txt)
| /rezbuild-0.15.0.tar.gz/rezbuild-0.15.0/README.md | 0.521227 | 0.737052 | README.md | pypi |
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_category_list import ResponseCategoryList
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    visible: Union[Unset, None, bool] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Dict[str, Any]:
    """Assemble the httpx request kwargs for GET /categories."""
    raw_params: Dict[str, Any] = {
        "search": search,
        "visible": visible,
        "limit": limit,
        "offset": offset,
    }
    # Drop query parameters the caller left unset or explicitly None.
    query = {
        key: value
        for key, value in raw_params.items()
        if value is not UNSET and value is not None
    }
    return {
        "method": "get",
        "url": "{}/categories".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseCategoryList]:
    """Deserialize a 200 response body; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseCategoryList.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseCategoryList]:
    """Wrap the raw httpx response in the typed Response container."""
    parsed_body = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed_body,
    )
def sync_detailed(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    visible: Union[Unset, None, bool] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Response[ResponseCategoryList]:
    """Search categories (blocking, returns the full Response wrapper).

    Load all categories matching a search string. If the search string is
    empty, all categories will be returned. Only categories that belong to
    the company identified from the request's apiKey are returned.

    Args:
        search (Union[Unset, None, str]):
        visible (Union[Unset, None, bool]):
        limit (Union[Unset, None, int]):
        offset (Union[Unset, None, int]):

    Returns:
        Response[ResponseCategoryList]
    """
    request_kwargs = _get_kwargs(
        client=client,
        search=search,
        visible=visible,
        limit=limit,
        offset=offset,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    visible: Union[Unset, None, bool] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Optional[ResponseCategoryList]:
    """Search categories, returning only the parsed body (None on non-200).

    Load all categories matching a search string. If the search string is
    empty, all categories will be returned. Only categories that belong to
    the company identified from the request's apiKey are returned.

    Args:
        search (Union[Unset, None, str]):
        visible (Union[Unset, None, bool]):
        limit (Union[Unset, None, int]):
        offset (Union[Unset, None, int]):

    Returns:
        Response[ResponseCategoryList]
    """
    detailed = sync_detailed(
        client=client,
        search=search,
        visible=visible,
        limit=limit,
        offset=offset,
    )
    return detailed.parsed
async def asyncio_detailed(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    visible: Union[Unset, None, bool] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Response[ResponseCategoryList]:
    """Search categories (async, returns the full Response wrapper).

    Load all categories matching a search string. If the search string is
    empty, all categories will be returned. Only categories that belong to
    the company identified from the request's apiKey are returned.

    Args:
        search (Union[Unset, None, str]):
        visible (Union[Unset, None, bool]):
        limit (Union[Unset, None, int]):
        offset (Union[Unset, None, int]):

    Returns:
        Response[ResponseCategoryList]
    """
    request_kwargs = _get_kwargs(
        client=client,
        search=search,
        visible=visible,
        limit=limit,
        offset=offset,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    visible: Union[Unset, None, bool] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Optional[ResponseCategoryList]:
    """Search categories (async), returning only the parsed body.

    Load all categories matching a search string. If the search string is
    empty, all categories will be returned. Only categories that belong to
    the company identified from the request's apiKey are returned.

    Args:
        search (Union[Unset, None, str]):
        visible (Union[Unset, None, bool]):
        limit (Union[Unset, None, int]):
        offset (Union[Unset, None, int]):

    Returns:
        Optional[ResponseCategoryList]: parsed body, or None on non-200.
    """
    # The original final line had dataset-export metadata fused onto it;
    # stripped here to restore valid Python.
    response = await asyncio_detailed(
        client=client,
        search=search,
        visible=visible,
        limit=limit,
        offset=offset,
    )
    return response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_booking import ResponseBooking
from ...types import UNSET, Response, Unset
def _get_kwargs(
    order_number: str,
    *,
    client: Client,
    send_notifications: Union[Unset, None, bool] = UNSET,
) -> Dict[str, Any]:
    """Assemble the httpx request kwargs for DELETE /bookings/{orderNumber}."""
    # Only forward the query parameter when the caller actually provided it.
    query: Dict[str, Any] = {}
    if send_notifications is not UNSET and send_notifications is not None:
        query["sendNotifications"] = send_notifications
    return {
        "method": "delete",
        "url": "{}/bookings/{orderNumber}".format(client.base_url, orderNumber=order_number),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseBooking]:
    """Deserialize a 200 response body; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseBooking.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseBooking]:
    """Wrap the raw httpx response in the typed Response container."""
    parsed_body = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed_body,
    )
def sync_detailed(
    order_number: str,
    *,
    client: Client,
    send_notifications: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseBooking]:
    """Cancel booking (blocking, returns the full Response wrapper).

    Cancel an existing booking and send notifications about the cancellation.
    In case of an Automated Payment booking, will also refund payment.

    Args:
        order_number (str):
        send_notifications (Union[Unset, None, bool]):

    Returns:
        Response[ResponseBooking]
    """
    request_kwargs = _get_kwargs(
        order_number=order_number,
        client=client,
        send_notifications=send_notifications,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    order_number: str,
    *,
    client: Client,
    send_notifications: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseBooking]:
    """Cancel booking, returning only the parsed body (None on non-200).

    Cancel an existing booking and send notifications about the cancellation.
    In case of an Automated Payment booking, will also refund payment.

    Args:
        order_number (str):
        send_notifications (Union[Unset, None, bool]):

    Returns:
        Response[ResponseBooking]
    """
    detailed = sync_detailed(
        order_number=order_number,
        client=client,
        send_notifications=send_notifications,
    )
    return detailed.parsed
async def asyncio_detailed(
    order_number: str,
    *,
    client: Client,
    send_notifications: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseBooking]:
    """Cancel booking (async, returns the full Response wrapper).

    Cancel an existing booking and send notifications about the cancellation.
    In case of an Automated Payment booking, will also refund payment.

    Args:
        order_number (str):
        send_notifications (Union[Unset, None, bool]):

    Returns:
        Response[ResponseBooking]
    """
    request_kwargs = _get_kwargs(
        order_number=order_number,
        client=client,
        send_notifications=send_notifications,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    order_number: str,
    *,
    client: Client,
    send_notifications: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseBooking]:
    """Cancel booking (async), returning only the parsed body.

    Cancel an existing booking and send notifications about the cancellation.
    In case of an Automated Payment booking, will also refund payment.

    Args:
        order_number (str):
        send_notifications (Union[Unset, None, bool]):

    Returns:
        Optional[ResponseBooking]: parsed body, or None on non-200.
    """
    # The original final line had dataset-export metadata fused onto it;
    # stripped here to restore valid Python.
    response = await asyncio_detailed(
        order_number=order_number,
        client=client,
        send_notifications=send_notifications,
    )
    return response.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.booking_create import BookingCreate
from ...models.response_booking import ResponseBooking
from ...types import Response
def _get_kwargs(
    *,
    client: Client,
    json_body: BookingCreate,
) -> Dict[str, Any]:
    """Assemble the httpx request kwargs for POST /bookings."""
    # Serialize the model once and send it as the JSON request body.
    payload = json_body.to_dict()
    return {
        "method": "post",
        "url": "{}/bookings".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "json": payload,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseBooking]:
    """Deserialize a 200 response body; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseBooking.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseBooking]:
    """Wrap the raw httpx response in the typed Response container."""
    parsed_body = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed_body,
    )
def sync_detailed(
    *,
    client: Client,
    json_body: BookingCreate,
) -> Response[ResponseBooking]:
    """Create booking (blocking, returns the full Response wrapper).

    Create a new booking. Many of the payload fields are not required and
    will be calculated if not specified.

    Args:
        json_body (BookingCreate): Booking create object. A single Booking
            can book multiple products, each of them being a BookingItem;
            all products of one booking have to be from the same supplier.

    Returns:
        Response[ResponseBooking]
    """
    request_kwargs = _get_kwargs(
        client=client,
        json_body=json_body,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    json_body: BookingCreate,
) -> Optional[ResponseBooking]:
    """Create booking, returning only the parsed body (None on non-200).

    Create a new booking. Many of the payload fields are not required and
    will be calculated if not specified.

    Args:
        json_body (BookingCreate): Booking create object. A single Booking
            can book multiple products, each of them being a BookingItem;
            all products of one booking have to be from the same supplier.

    Returns:
        Response[ResponseBooking]
    """
    detailed = sync_detailed(
        client=client,
        json_body=json_body,
    )
    return detailed.parsed
async def asyncio_detailed(
    *,
    client: Client,
    json_body: BookingCreate,
) -> Response[ResponseBooking]:
    """Create booking (async, returns the full Response wrapper).

    Create a new booking. Many of the payload fields are not required and
    will be calculated if not specified.

    Args:
        json_body (BookingCreate): Booking create object. A single Booking
            can book multiple products, each of them being a BookingItem;
            all products of one booking have to be from the same supplier.

    Returns:
        Response[ResponseBooking]
    """
    request_kwargs = _get_kwargs(
        client=client,
        json_body=json_body,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    json_body: BookingCreate,
) -> Optional[ResponseBooking]:
    """Create booking (async), returning only the parsed body.

    Create a new booking. Many of the payload fields are not required and
    will be calculated if not specified.

    Args:
        json_body (BookingCreate): Booking create object. A single Booking
            can book multiple products, each of them being a BookingItem;
            all products of one booking have to be from the same supplier.

    Returns:
        Optional[ResponseBooking]: parsed body, or None on non-200.
    """
    # The original final line had dataset-export metadata fused onto it;
    # stripped here to restore valid Python.
    response = await asyncio_detailed(
        client=client,
        json_body=json_body,
    )
    return response.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.booking_update import BookingUpdate
from ...models.response_booking import ResponseBooking
from ...types import Response
def _get_kwargs(
    order_number: str,
    *,
    client: Client,
    json_body: BookingUpdate,
) -> Dict[str, Any]:
    """Assemble the httpx request kwargs for PUT /bookings/{orderNumber}."""
    # Serialize the model once and send it as the JSON request body.
    payload = json_body.to_dict()
    return {
        "method": "put",
        "url": "{}/bookings/{orderNumber}".format(client.base_url, orderNumber=order_number),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "json": payload,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseBooking]:
    """Deserialize a 200 response body; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseBooking.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseBooking]:
    """Wrap the raw httpx response in the typed Response container."""
    parsed_body = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed_body,
    )
def sync_detailed(
    order_number: str,
    *,
    client: Client,
    json_body: BookingUpdate,
) -> Response[ResponseBooking]:
    """Update booking (blocking, returns the full Response wrapper).

    Constraints from the API contract:

    - Only certain fields can currently be updated using the API, and only
      manual payment bookings can be updated.
    - This is NOT a partial update: send back the full booking object as
      retrieved from the booking create or search services. Supported fields
      and relations that are omitted will be deleted — e.g. omitting the
      ``participants`` array removes the participants from the existing order.
    - Preserve the order of the items in the ``items`` and ``participants``
      arrays: no ids are exposed in the API, so entries are matched by
      position against the existing booking object.
    - Updating a booking via the API triggers webhooks and e-mail
      notifications in the same way as an order update through the UI.
    - An agent can update supplier orders only if the supplier allows them to
      edit orders when sharing their products.

    Currently supported fields: Booking.customer (all customer data),
    per-booking Booking.field values, per-participant
    Booking.item.participant.field values, Booking.resellerComments,
    Booking.resellerReference, and Booking.items.pickupLocation.locationName
    (the last three updatable by both the booking agent and the supplier).

    Args:
        order_number (str):
        json_body (BookingUpdate): Booking update object used to update a
            booking in Rezdy's system.

    Returns:
        Response[ResponseBooking]
    """
    kwargs = _get_kwargs(
        order_number=order_number,
        client=client,
        json_body=json_body,
    )
    response = httpx.request(
        verify=client.verify_ssl,
        **kwargs,
    )
    return _build_response(response=response)
def sync(
    order_number: str,
    *,
    client: Client,
    json_body: BookingUpdate,
) -> Optional[ResponseBooking]:
    """Update booking, returning only the parsed body (None on non-200).

    Constraints from the API contract:

    - Only certain fields can currently be updated using the API, and only
      manual payment bookings can be updated.
    - This is NOT a partial update: send back the full booking object as
      retrieved from the booking create or search services. Supported fields
      and relations that are omitted will be deleted — e.g. omitting the
      ``participants`` array removes the participants from the existing order.
    - Preserve the order of the items in the ``items`` and ``participants``
      arrays: no ids are exposed in the API, so entries are matched by
      position against the existing booking object.
    - Updating a booking via the API triggers webhooks and e-mail
      notifications in the same way as an order update through the UI.
    - An agent can update supplier orders only if the supplier allows them to
      edit orders when sharing their products.

    Currently supported fields: Booking.customer (all customer data),
    per-booking Booking.field values, per-participant
    Booking.item.participant.field values, Booking.resellerComments,
    Booking.resellerReference, and Booking.items.pickupLocation.locationName
    (the last three updatable by both the booking agent and the supplier).

    Args:
        order_number (str):
        json_body (BookingUpdate): Booking update object used to update a
            booking in Rezdy's system.

    Returns:
        Response[ResponseBooking]
    """
    return sync_detailed(
        order_number=order_number,
        client=client,
        json_body=json_body,
    ).parsed
async def asyncio_detailed(
    order_number: str,
    *,
    client: Client,
    json_body: BookingUpdate,
) -> Response[ResponseBooking]:
    """Update booking

    Sends the full booking object back to Rezdy's booking update service and
    returns the full HTTP response wrapper. Partial updates are not
    supported: any supported field or relation that is omitted from the
    payload is deleted, and the order of entries in the ``items`` and
    ``participants`` arrays must be preserved because they are matched by
    position against the existing booking. Only manual-payment bookings can
    be updated, and an update triggers webhooks and e-mail notifications the
    same way as an update through the UI.

    Currently supported fields: Booking.customer, Booking.field
    (per-booking fields), Booking.item.participant.field (per-participant
    fields), Booking.resellerComments, Booking.resellerReference and
    Booking.items.pickupLocation.locationName.

    Args:
        order_number (str): Order number of the booking to update.
        json_body (BookingUpdate): Booking update object used to update a
            booking in Rezdy's system.

    Returns:
        Response[ResponseBooking]
    """
    request_kwargs = _get_kwargs(
        order_number=order_number,
        client=client,
        json_body=json_body,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    order_number: str,
    *,
    client: Client,
    json_body: BookingUpdate,
) -> Optional[ResponseBooking]:
    """Update booking

    Convenience wrapper around :func:`asyncio_detailed` that returns only the
    parsed response body. The update service does not support partial
    updates: send back the full booking object as retrieved from the create
    or search services, preserving the order of the ``items`` and
    ``participants`` arrays, otherwise omitted supported fields are deleted.
    Only manual-payment bookings can be updated, and updates trigger
    webhooks and e-mail notifications like UI edits.

    Args:
        order_number (str): Order number of the booking to update.
        json_body (BookingUpdate): Booking update object used to update a
            booking in Rezdy's system.

    Returns:
        Optional[ResponseBooking]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    detailed = await asyncio_detailed(
        order_number=order_number,
        client=client,
        json_body=json_body,
    )
    return detailed.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.response_rate import ResponseRate
from ...types import Response
def _get_kwargs(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for DELETE /rates/{rateId}/products/{productCode}."""
    return {
        "method": "delete",
        "url": f"{client.base_url}/rates/{rate_id}/products/{product_code}",
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseRate]:
    """Deserialize an HTTP 200 body into a ResponseRate; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseRate.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseRate]:
    """Wrap the raw httpx response in the generated Response container."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
) -> Response[ResponseRate]:
    """Remove product

    Removes a product from the specified rate and returns the full HTTP
    response wrapper.

    Args:
        rate_id (int): ID of the rate to remove the product from.
        product_code (str): Code of the product to remove.

    Returns:
        Response[ResponseRate]
    """
    request_kwargs = _get_kwargs(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
) -> Optional[ResponseRate]:
    """Remove product

    Removes a product from the specified rate and returns only the parsed
    response body.

    Args:
        rate_id (int): ID of the rate to remove the product from.
        product_code (str): Code of the product to remove.

    Returns:
        Optional[ResponseRate]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    return sync_detailed(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
    ).parsed
async def asyncio_detailed(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
) -> Response[ResponseRate]:
    """Remove product

    Async variant: removes a product from the specified rate and returns the
    full HTTP response wrapper.

    Args:
        rate_id (int): ID of the rate to remove the product from.
        product_code (str): Code of the product to remove.

    Returns:
        Response[ResponseRate]
    """
    request_kwargs = _get_kwargs(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
) -> Optional[ResponseRate]:
    """Remove product

    Async variant: removes a product from the specified rate and returns
    only the parsed response body.

    Args:
        rate_id (int): ID of the rate to remove the product from.
        product_code (str): Code of the product to remove.

    Returns:
        Optional[ResponseRate]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    detailed = await asyncio_detailed(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
    )
    return detailed.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.response_rate import ResponseRate
from ...types import Response
def _get_kwargs(
    rate_id: int,
    *,
    client: Client,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for GET /rates/{rateId}."""
    return {
        "method": "get",
        "url": f"{client.base_url}/rates/{rate_id}",
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseRate]:
    """Deserialize an HTTP 200 body into a ResponseRate; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseRate.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseRate]:
    """Wrap the raw httpx response in the generated Response container."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    rate_id: int,
    *,
    client: Client,
) -> Response[ResponseRate]:
    """Get rate

    Retrieves a rate based on its ID and returns the full HTTP response
    wrapper.

    Args:
        rate_id (int): ID of the rate to retrieve.

    Returns:
        Response[ResponseRate]
    """
    request_kwargs = _get_kwargs(
        rate_id=rate_id,
        client=client,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    rate_id: int,
    *,
    client: Client,
) -> Optional[ResponseRate]:
    """Get rate

    Retrieves a rate based on its ID and returns only the parsed response
    body.

    Args:
        rate_id (int): ID of the rate to retrieve.

    Returns:
        Optional[ResponseRate]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    return sync_detailed(
        rate_id=rate_id,
        client=client,
    ).parsed
async def asyncio_detailed(
    rate_id: int,
    *,
    client: Client,
) -> Response[ResponseRate]:
    """Get rate

    Async variant: retrieves a rate based on its ID and returns the full
    HTTP response wrapper.

    Args:
        rate_id (int): ID of the rate to retrieve.

    Returns:
        Response[ResponseRate]
    """
    request_kwargs = _get_kwargs(
        rate_id=rate_id,
        client=client,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    rate_id: int,
    *,
    client: Client,
) -> Optional[ResponseRate]:
    """Get rate

    Async variant: retrieves a rate based on its ID and returns only the
    parsed response body.

    Args:
        rate_id (int): ID of the rate to retrieve.

    Returns:
        Optional[ResponseRate]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    detailed = await asyncio_detailed(
        rate_id=rate_id,
        client=client,
    )
    return detailed.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.product_rate import ProductRate
from ...models.response_rate import ResponseRate
from ...types import Response
def _get_kwargs(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
    json_body: ProductRate,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for PUT /rates/{rateId}/products/{productCode}."""
    return {
        "method": "put",
        "url": f"{client.base_url}/rates/{rate_id}/products/{product_code}",
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "json": json_body.to_dict(),
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseRate]:
    """Deserialize an HTTP 200 body into a ResponseRate; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseRate.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseRate]:
    """Wrap the raw httpx response in the generated Response container."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
    json_body: ProductRate,
) -> Response[ResponseRate]:
    """Add product

    Adds a product to the specified rate and returns the full HTTP response
    wrapper.

    Args:
        rate_id (int): ID of the rate the product is added to.
        product_code (str): Code of the product to add.
        json_body (ProductRate): A ProductRate is used to map a product and
            its associated value commission.

    Returns:
        Response[ResponseRate]
    """
    request_kwargs = _get_kwargs(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
        json_body=json_body,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
    json_body: ProductRate,
) -> Optional[ResponseRate]:
    """Add product

    Adds a product to the specified rate and returns only the parsed
    response body.

    Args:
        rate_id (int): ID of the rate the product is added to.
        product_code (str): Code of the product to add.
        json_body (ProductRate): A ProductRate is used to map a product and
            its associated value commission.

    Returns:
        Optional[ResponseRate]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    return sync_detailed(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
        json_body=json_body,
    ).parsed
async def asyncio_detailed(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
    json_body: ProductRate,
) -> Response[ResponseRate]:
    """Add product

    Async variant: adds a product to the specified rate and returns the full
    HTTP response wrapper.

    Args:
        rate_id (int): ID of the rate the product is added to.
        product_code (str): Code of the product to add.
        json_body (ProductRate): A ProductRate is used to map a product and
            its associated value commission.

    Returns:
        Response[ResponseRate]
    """
    request_kwargs = _get_kwargs(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
        json_body=json_body,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    rate_id: int,
    product_code: str,
    *,
    client: Client,
    json_body: ProductRate,
) -> Optional[ResponseRate]:
    """Add product

    Async variant: adds a product to the specified rate and returns only the
    parsed response body.

    Args:
        rate_id (int): ID of the rate the product is added to.
        product_code (str): Code of the product to add.
        json_body (ProductRate): A ProductRate is used to map a product and
            its associated value commission.

    Returns:
        Optional[ResponseRate]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    detailed = await asyncio_detailed(
        rate_id=rate_id,
        product_code=product_code,
        client=client,
        json_body=json_body,
    )
    return detailed.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_rate_list import ResponseRateList
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    rate_name: Union[Unset, None, str] = UNSET,
    product_code: Union[Unset, None, str] = UNSET,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for GET /rates/search."""
    raw_params: Dict[str, Any] = {
        "rateName": rate_name,
        "productCode": product_code,
    }
    # Drop query parameters the caller left unset or explicitly None.
    query_params = {
        name: value
        for name, value in raw_params.items()
        if value is not UNSET and value is not None
    }
    return {
        "method": "get",
        "url": f"{client.base_url}/rates/search",
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseRateList]:
    """Deserialize an HTTP 200 body into a ResponseRateList; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseRateList.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseRateList]:
    """Wrap the raw httpx response in the generated Response container."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    rate_name: Union[Unset, None, str] = UNSET,
    product_code: Union[Unset, None, str] = UNSET,
) -> Response[ResponseRateList]:
    """Search rates

    Searches rates based on rate name and product code. If rateName and
    productCode are not specified, then it will return all rates belonging
    to the supplier. Returns the full HTTP response wrapper.

    Args:
        rate_name (Union[Unset, None, str]): Rate name filter.
        product_code (Union[Unset, None, str]): Product code filter.

    Returns:
        Response[ResponseRateList]
    """
    request_kwargs = _get_kwargs(
        client=client,
        rate_name=rate_name,
        product_code=product_code,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    rate_name: Union[Unset, None, str] = UNSET,
    product_code: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseRateList]:
    """Search rates

    Searches rates based on rate name and product code. If rateName and
    productCode are not specified, then it will return all rates belonging
    to the supplier. Returns only the parsed response body.

    Args:
        rate_name (Union[Unset, None, str]): Rate name filter.
        product_code (Union[Unset, None, str]): Product code filter.

    Returns:
        Optional[ResponseRateList]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    return sync_detailed(
        client=client,
        rate_name=rate_name,
        product_code=product_code,
    ).parsed
async def asyncio_detailed(
    *,
    client: Client,
    rate_name: Union[Unset, None, str] = UNSET,
    product_code: Union[Unset, None, str] = UNSET,
) -> Response[ResponseRateList]:
    """Search rates

    Async variant: searches rates based on rate name and product code. If
    rateName and productCode are not specified, then it will return all
    rates belonging to the supplier. Returns the full HTTP response wrapper.

    Args:
        rate_name (Union[Unset, None, str]): Rate name filter.
        product_code (Union[Unset, None, str]): Product code filter.

    Returns:
        Response[ResponseRateList]
    """
    request_kwargs = _get_kwargs(
        client=client,
        rate_name=rate_name,
        product_code=product_code,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    rate_name: Union[Unset, None, str] = UNSET,
    product_code: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseRateList]:
    """Search rates

    Async variant: searches rates based on rate name and product code. If
    rateName and productCode are not specified, then it will return all
    rates belonging to the supplier. Returns only the parsed response body.

    Args:
        rate_name (Union[Unset, None, str]): Rate name filter.
        product_code (Union[Unset, None, str]): Product code filter.

    Returns:
        Optional[ResponseRateList]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    detailed = await asyncio_detailed(
        client=client,
        rate_name=rate_name,
        product_code=product_code,
    )
    return detailed.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.product_update_request import ProductUpdateRequest
from ...models.response_product import ResponseProduct
from ...types import Response
def _get_kwargs(
    product_code: str,
    *,
    client: Client,
    json_body: ProductUpdateRequest,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for PUT /products/{productCode}."""
    return {
        "method": "put",
        "url": f"{client.base_url}/products/{product_code}",
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "json": json_body.to_dict(),
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseProduct]:
    """Deserialize an HTTP 200 body into a ResponseProduct; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseProduct.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseProduct]:
    """Wrap the raw httpx response in the generated Response container."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    product_code: str,
    *,
    client: Client,
    json_body: ProductUpdateRequest,
) -> Response[ResponseProduct]:
    """Update product

    Updates a product and returns the full HTTP response wrapper.

    When updating price options, the full list of existing price options
    must be supplied: any price option missing from the request is removed
    from the product, and any extra price option in the request is added.
    Updating price option values via the API overrides all existing session
    prices to match the product price; to keep a different session price,
    edit the product in the Rezdy UI instead.

    Args:
        product_code (str): Code of the product to update.
        json_body (ProductUpdateRequest): Partial product model containing
            all fields which are currently supported in product create via
            API.

    Returns:
        Response[ResponseProduct]
    """
    request_kwargs = _get_kwargs(
        product_code=product_code,
        client=client,
        json_body=json_body,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    product_code: str,
    *,
    client: Client,
    json_body: ProductUpdateRequest,
) -> Optional[ResponseProduct]:
    """Update product

    Updates a product and returns only the parsed response body.

    When updating price options, the full list of existing price options
    must be supplied: any price option missing from the request is removed
    from the product, and any extra price option in the request is added.
    Updating price option values via the API overrides all existing session
    prices to match the product price; to keep a different session price,
    edit the product in the Rezdy UI instead.

    Args:
        product_code (str): Code of the product to update.
        json_body (ProductUpdateRequest): Partial product model containing
            all fields which are currently supported in product create via
            API.

    Returns:
        Optional[ResponseProduct]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    return sync_detailed(
        product_code=product_code,
        client=client,
        json_body=json_body,
    ).parsed
async def asyncio_detailed(
    product_code: str,
    *,
    client: Client,
    json_body: ProductUpdateRequest,
) -> Response[ResponseProduct]:
    """Update product

    Async variant: updates a product and returns the full HTTP response
    wrapper.

    When updating price options, the full list of existing price options
    must be supplied: any price option missing from the request is removed
    from the product, and any extra price option in the request is added.
    Updating price option values via the API overrides all existing session
    prices to match the product price; to keep a different session price,
    edit the product in the Rezdy UI instead.

    Args:
        product_code (str): Code of the product to update.
        json_body (ProductUpdateRequest): Partial product model containing
            all fields which are currently supported in product create via
            API.

    Returns:
        Response[ResponseProduct]
    """
    request_kwargs = _get_kwargs(
        product_code=product_code,
        client=client,
        json_body=json_body,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw_response = await session.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    product_code: str,
    *,
    client: Client,
    json_body: ProductUpdateRequest,
) -> Optional[ResponseProduct]:
    """Update product

    Async variant: updates a product and returns only the parsed response
    body.

    When updating price options, the full list of existing price options
    must be supplied: any price option missing from the request is removed
    from the product, and any extra price option in the request is added.
    Updating price option values via the API overrides all existing session
    prices to match the product price; to keep a different session price,
    edit the product in the Rezdy UI instead.

    Args:
        product_code (str): Code of the product to update.
        json_body (ProductUpdateRequest): Partial product model containing
            all fields which are currently supported in product create via
            API.

    Returns:
        Optional[ResponseProduct]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    detailed = await asyncio_detailed(
        product_code=product_code,
        client=client,
        json_body=json_body,
    )
    return detailed.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.add_product_image_multipart_data import AddProductImageMultipartData
from ...models.response_image import ResponseImage
from ...types import Response
def _get_kwargs(
    product_code: str,
    *,
    client: Client,
    multipart_data: AddProductImageMultipartData,
) -> Dict[str, Any]:
    """Assemble the httpx keyword arguments for POST /products/{productCode}/images."""
    return {
        "method": "post",
        "url": f"{client.base_url}/products/{product_code}/images",
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        # Multipart form-data body carrying the image file attachment.
        "files": multipart_data.to_multipart(),
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseImage]:
    """Deserialize an HTTP 200 body into a ResponseImage; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseImage.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseImage]:
    """Wrap the raw httpx response in the generated Response container."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    product_code: str,
    *,
    client: Client,
    multipart_data: AddProductImageMultipartData,
) -> Response[ResponseImage]:
    """Add product image

    Uploads one image to an existing product (created via ``POST
    /products``) as a standard multipart/form-data file upload; the file
    attachment parameter is named ``file`` and a filename is mandatory.
    Upload one image per call. A successful response contains the generated
    image URLs for the different image dimensions plus the image Id, which
    can later be used to delete the image from the product. Returns the
    full HTTP response wrapper.

    Args:
        product_code (str): Code of the product to attach the image to.
        multipart_data (AddProductImageMultipartData): Multipart payload
            carrying the image file.

    Returns:
        Response[ResponseImage]
    """
    request_kwargs = _get_kwargs(
        product_code=product_code,
        client=client,
        multipart_data=multipart_data,
    )
    raw_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=raw_response)
def sync(
    product_code: str,
    *,
    client: Client,
    multipart_data: AddProductImageMultipartData,
) -> Optional[ResponseImage]:
    """Add product image

    Uploads one image to an existing product (created via ``POST
    /products``) as a standard multipart/form-data file upload; the file
    attachment parameter is named ``file`` and a filename is mandatory.
    Upload one image per call. A successful response contains the generated
    image URLs for the different image dimensions plus the image Id, which
    can later be used to delete the image from the product. Returns only
    the parsed response body.

    Args:
        product_code (str): Code of the product to attach the image to.
        multipart_data (AddProductImageMultipartData): Multipart payload
            carrying the image file.

    Returns:
        Optional[ResponseImage]: the parsed body on HTTP 200, otherwise
        ``None``.
    """
    return sync_detailed(
        product_code=product_code,
        client=client,
        multipart_data=multipart_data,
    ).parsed
async def asyncio_detailed(
    product_code: str,
    *,
    client: Client,
    multipart_data: AddProductImageMultipartData,
) -> Response[ResponseImage]:
    """Add product image (async, full response).

    Upload a single image to an existing product via a multipart form-data
    request (parameter name ``file`` plus a mandatory filename).  One call
    is required per image.

    Args:
        product_code (str): Code of the product receiving the image.
        multipart_data (AddProductImageMultipartData): Multipart payload.

    Returns:
        Response[ResponseImage]: Status, headers, raw content and parsed body.
    """
    request_kwargs = _get_kwargs(
        product_code=product_code,
        client=client,
        multipart_data=multipart_data,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        http_response = await session.request(**request_kwargs)
    return _build_response(response=http_response)
async def asyncio(
    product_code: str,
    *,
    client: Client,
    multipart_data: AddProductImageMultipartData,
) -> Optional[ResponseImage]:
    """Add product image (async).

    Upload a single image to an existing product via a multipart form-data
    request (parameter name ``file`` plus a mandatory filename).  One call
    is required per image.  A successful response carries the generated
    image URLs and the image id (usable to delete the image later).

    Args:
        product_code (str): Code of the product receiving the image.
        multipart_data (AddProductImageMultipartData): Multipart payload.

    Returns:
        Optional[ResponseImage]: Parsed response body, or None.
    """
    # NOTE(review): the original final line carried non-Python dataset
    # residue after ``.parsed``; it has been removed so the module parses.
    detailed_response = await asyncio_detailed(
        product_code=product_code,
        client=client,
        multipart_data=multipart_data,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_product_list import ResponseProductList
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Dict[str, Any]:
    """Assemble the httpx request arguments for ``GET /products``."""
    raw_params: Dict[str, Any] = {
        "search": search,
        "limit": limit,
        "offset": offset,
    }
    # Drop query parameters the caller left unset or explicitly None.
    query_params = {
        name: value
        for name, value in raw_params.items()
        if value is not UNSET and value is not None
    }
    return {
        "method": "get",
        "url": "{}/products".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseProductList]:
    """Deserialize a 200 response body; any other status parses to None."""
    if response.status_code != 200:
        return None
    return ResponseProductList.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseProductList]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Response[ResponseProductList]:
    """Search products (full response).

    Load all of your own (supplier) products matching *search*; an empty
    search string returns every product.  Agents should use the
    ``/products/marketplace`` service instead.

    Args:
        search (Union[Unset, None, str]): Search string to match.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Response[ResponseProductList]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        search=search,
        limit=limit,
        offset=offset,
    )
    http_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=http_response)
def sync(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Optional[ResponseProductList]:
    """Search products.

    Load all of your own (supplier) products matching *search*; an empty
    search string returns every product.  Agents should use the
    ``/products/marketplace`` service instead.

    Args:
        search (Union[Unset, None, str]): Search string to match.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Optional[ResponseProductList]: Parsed response body, or None.
    """
    detailed_response = sync_detailed(
        client=client,
        search=search,
        limit=limit,
        offset=offset,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Response[ResponseProductList]:
    """Search products (async, full response).

    Load all of your own (supplier) products matching *search*; an empty
    search string returns every product.  Agents should use the
    ``/products/marketplace`` service instead.

    Args:
        search (Union[Unset, None, str]): Search string to match.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Response[ResponseProductList]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        search=search,
        limit=limit,
        offset=offset,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        http_response = await session.request(**request_kwargs)
    return _build_response(response=http_response)
async def asyncio(
    *,
    client: Client,
    search: Union[Unset, None, str] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Optional[ResponseProductList]:
    """Search products (async).

    Load all of your own (supplier) products matching *search*; an empty
    search string returns every product.  Agents should use the
    ``/products/marketplace`` service instead.

    Args:
        search (Union[Unset, None, str]): Search string to match.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Optional[ResponseProductList]: Parsed response body, or None.
    """
    # NOTE(review): the original final line carried non-Python dataset
    # residue after ``.parsed``; it has been removed so the module parses.
    detailed_response = await asyncio_detailed(
        client=client,
        search=search,
        limit=limit,
        offset=offset,
    )
    return detailed_response.parsed
from typing import Any, Dict, List, Optional, Union
import httpx
from ...client import Client
from ...models.response_session_list import ResponseSessionList
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    product_code: List[str],
    start_time: Union[Unset, None, str] = UNSET,
    end_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    end_time_local: Union[Unset, None, str] = UNSET,
    min_availability: Union[Unset, None, int] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Dict[str, Any]:
    """Assemble the httpx request arguments for ``GET /availability``."""
    raw_params: Dict[str, Any] = {
        "productCode": product_code,
        "startTime": start_time,
        "endTime": end_time,
        "startTimeLocal": start_time_local,
        "endTimeLocal": end_time_local,
        "minAvailability": min_availability,
        "limit": limit,
        "offset": offset,
    }
    # Drop query parameters the caller left unset or explicitly None.
    query_params = {
        name: value
        for name, value in raw_params.items()
        if value is not UNSET and value is not None
    }
    return {
        "method": "get",
        "url": "{}/availability".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseSessionList]:
    """Deserialize a 200 response body; any other status parses to None."""
    if response.status_code != 200:
        return None
    return ResponseSessionList.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseSessionList]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    product_code: List[str],
    start_time: Union[Unset, None, str] = UNSET,
    end_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    end_time_local: Union[Unset, None, str] = UNSET,
    min_availability: Union[Unset, None, int] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Response[ResponseSessionList]:
    """Search availability (full response).

    Return sessions, with availability and pricing details, for the given
    date range.  Session pricing may differ from product pricing when a
    supplier overrides prices for a session or ticket type (dynamic
    pricing).  When several products share a session, the session carries
    price overrides for all of them, so clients must filter price options
    by the chosen product code before display.

    Args:
        product_code (List[str]): Product codes to search.
        start_time (Union[Unset, None, str]): Range start (UTC).
        end_time (Union[Unset, None, str]): Range end (UTC).
        start_time_local (Union[Unset, None, str]): Range start (local).
        end_time_local (Union[Unset, None, str]): Range end (local).
        min_availability (Union[Unset, None, int]): Minimum seats available.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Response[ResponseSessionList]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        end_time=end_time,
        start_time_local=start_time_local,
        end_time_local=end_time_local,
        min_availability=min_availability,
        limit=limit,
        offset=offset,
    )
    http_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=http_response)
def sync(
    *,
    client: Client,
    product_code: List[str],
    start_time: Union[Unset, None, str] = UNSET,
    end_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    end_time_local: Union[Unset, None, str] = UNSET,
    min_availability: Union[Unset, None, int] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Optional[ResponseSessionList]:
    """Search availability.

    Return sessions, with availability and pricing details, for the given
    date range.  Session pricing may differ from product pricing when a
    supplier overrides prices for a session or ticket type (dynamic
    pricing).  When several products share a session, clients must filter
    price options by the chosen product code before display.

    Args:
        product_code (List[str]): Product codes to search.
        start_time (Union[Unset, None, str]): Range start (UTC).
        end_time (Union[Unset, None, str]): Range end (UTC).
        start_time_local (Union[Unset, None, str]): Range start (local).
        end_time_local (Union[Unset, None, str]): Range end (local).
        min_availability (Union[Unset, None, int]): Minimum seats available.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Optional[ResponseSessionList]: Parsed response body, or None.
    """
    detailed_response = sync_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        end_time=end_time,
        start_time_local=start_time_local,
        end_time_local=end_time_local,
        min_availability=min_availability,
        limit=limit,
        offset=offset,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    product_code: List[str],
    start_time: Union[Unset, None, str] = UNSET,
    end_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    end_time_local: Union[Unset, None, str] = UNSET,
    min_availability: Union[Unset, None, int] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Response[ResponseSessionList]:
    """Search availability (async, full response).

    Return sessions, with availability and pricing details, for the given
    date range.  Session pricing may differ from product pricing when a
    supplier overrides prices for a session or ticket type (dynamic
    pricing).  When several products share a session, clients must filter
    price options by the chosen product code before display.

    Args:
        product_code (List[str]): Product codes to search.
        start_time (Union[Unset, None, str]): Range start (UTC).
        end_time (Union[Unset, None, str]): Range end (UTC).
        start_time_local (Union[Unset, None, str]): Range start (local).
        end_time_local (Union[Unset, None, str]): Range end (local).
        min_availability (Union[Unset, None, int]): Minimum seats available.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Response[ResponseSessionList]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        end_time=end_time,
        start_time_local=start_time_local,
        end_time_local=end_time_local,
        min_availability=min_availability,
        limit=limit,
        offset=offset,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        http_response = await session.request(**request_kwargs)
    return _build_response(response=http_response)
async def asyncio(
    *,
    client: Client,
    product_code: List[str],
    start_time: Union[Unset, None, str] = UNSET,
    end_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    end_time_local: Union[Unset, None, str] = UNSET,
    min_availability: Union[Unset, None, int] = UNSET,
    limit: Union[Unset, None, int] = UNSET,
    offset: Union[Unset, None, int] = 0,
) -> Optional[ResponseSessionList]:
    """Search availability (async).

    Return sessions, with availability and pricing details, for the given
    date range.  Session pricing may differ from product pricing when a
    supplier overrides prices for a session or ticket type (dynamic
    pricing).  When several products share a session, clients must filter
    price options by the chosen product code before display.

    Args:
        product_code (List[str]): Product codes to search.
        start_time (Union[Unset, None, str]): Range start (UTC).
        end_time (Union[Unset, None, str]): Range end (UTC).
        start_time_local (Union[Unset, None, str]): Range start (local).
        end_time_local (Union[Unset, None, str]): Range end (local).
        min_availability (Union[Unset, None, int]): Minimum seats available.
        limit (Union[Unset, None, int]): Page size.
        offset (Union[Unset, None, int]): Page offset.

    Returns:
        Optional[ResponseSessionList]: Parsed response body, or None.
    """
    # NOTE(review): the original final line carried non-Python dataset
    # residue after ``.parsed``; it has been removed so the module parses.
    detailed_response = await asyncio_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        end_time=end_time,
        start_time_local=start_time_local,
        end_time_local=end_time_local,
        min_availability=min_availability,
        limit=limit,
        offset=offset,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional
import httpx
from ...client import Client
from ...models.response_session_list import ResponseSessionList
from ...models.session_update_batch_request import SessionUpdateBatchRequest
from ...types import Response
def _get_kwargs(
    *,
    client: Client,
    json_body: SessionUpdateBatchRequest,
) -> Dict[str, Any]:
    """Assemble the httpx request arguments for ``POST /availability/batch``."""
    payload = json_body.to_dict()
    return {
        "method": "post",
        "url": "{}/availability/batch".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "json": payload,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseSessionList]:
    """Deserialize a 200 response body; any other status parses to None."""
    if response.status_code != 200:
        return None
    return ResponseSessionList.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseSessionList]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    json_body: SessionUpdateBatchRequest,
) -> Response[ResponseSessionList]:
    """Update availability batch (full response).

    Batch-update availability for every session starting within the request's
    time range (maximum one week).  The product code is optional; when empty,
    sessions of all products are updated.  Useful for blacking out periods
    (e.g. setting availability to 0 for a full day).  Only sessions of
    products with InventoryMode = SESSION_SEATS can be updated.

    Request semantics: ``seatsAvailable`` sets the remaining availability;
    ``seats`` sets the total capacity regardless of existing bookings; when
    both are sent only ``seatsAvailable`` is used.  ``priceOptions``, when
    sent, overrides the session's default price options.

    Args:
        json_body (SessionUpdateBatchRequest): Batch update session request data.

    Returns:
        Response[ResponseSessionList]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        json_body=json_body,
    )
    http_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=http_response)
def sync(
    *,
    client: Client,
    json_body: SessionUpdateBatchRequest,
) -> Optional[ResponseSessionList]:
    """Update availability batch.

    Batch-update availability for every session starting within the request's
    time range (maximum one week).  The product code is optional; when empty,
    sessions of all products are updated.  Useful for blacking out periods
    (e.g. setting availability to 0 for a full day).  Only sessions of
    products with InventoryMode = SESSION_SEATS can be updated.

    Request semantics: ``seatsAvailable`` sets the remaining availability;
    ``seats`` sets the total capacity regardless of existing bookings; when
    both are sent only ``seatsAvailable`` is used.  ``priceOptions``, when
    sent, overrides the session's default price options.

    Args:
        json_body (SessionUpdateBatchRequest): Batch update session request data.

    Returns:
        Optional[ResponseSessionList]: Parsed response body, or None.
    """
    detailed_response = sync_detailed(
        client=client,
        json_body=json_body,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    json_body: SessionUpdateBatchRequest,
) -> Response[ResponseSessionList]:
    """Update availability batch (async, full response).

    Batch-update availability for every session starting within the request's
    time range (maximum one week).  The product code is optional; when empty,
    sessions of all products are updated.  Useful for blacking out periods
    (e.g. setting availability to 0 for a full day).  Only sessions of
    products with InventoryMode = SESSION_SEATS can be updated.

    Request semantics: ``seatsAvailable`` sets the remaining availability;
    ``seats`` sets the total capacity regardless of existing bookings; when
    both are sent only ``seatsAvailable`` is used.  ``priceOptions``, when
    sent, overrides the session's default price options.

    Args:
        json_body (SessionUpdateBatchRequest): Batch update session request data.

    Returns:
        Response[ResponseSessionList]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        json_body=json_body,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        http_response = await session.request(**request_kwargs)
    return _build_response(response=http_response)
async def asyncio(
    *,
    client: Client,
    json_body: SessionUpdateBatchRequest,
) -> Optional[ResponseSessionList]:
    """Update availability batch (async).

    Batch-update availability for every session starting within the request's
    time range (maximum one week).  The product code is optional; when empty,
    sessions of all products are updated.  Useful for blacking out periods
    (e.g. setting availability to 0 for a full day).  Only sessions of
    products with InventoryMode = SESSION_SEATS can be updated.

    Request semantics: ``seatsAvailable`` sets the remaining availability;
    ``seats`` sets the total capacity regardless of existing bookings; when
    both are sent only ``seatsAvailable`` is used.  ``priceOptions``, when
    sent, overrides the session's default price options.

    Args:
        json_body (SessionUpdateBatchRequest): Batch update session request data.

    Returns:
        Optional[ResponseSessionList]: Parsed response body, or None.
    """
    # NOTE(review): the original final line carried non-Python dataset
    # residue after ``.parsed``; it has been removed so the module parses.
    detailed_response = await asyncio_detailed(
        client=client,
        json_body=json_body,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_no_data import ResponseNoData
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    order_number: str,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Dict[str, Any]:
    """Assemble the httpx request arguments for ``DELETE /manifest/checkinOrderSession``."""
    raw_params: Dict[str, Any] = {
        "orderNumber": order_number,
        "productCode": product_code,
        "startTime": start_time,
        "startTimeLocal": start_time_local,
        "checkin": checkin,
    }
    # Drop query parameters the caller left unset or explicitly None.
    query_params = {
        name: value
        for name, value in raw_params.items()
        if value is not UNSET and value is not None
    }
    return {
        "method": "delete",
        "url": "{}/manifest/checkinOrderSession".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseNoData]:
    """Deserialize a 200 response body; any other status parses to None."""
    if response.status_code != 200:
        return None
    return ResponseNoData.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseNoData]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    order_number: str,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseNoData]:
    """Remove order item check-in (full response).

    The targeted session is identified by product code and start time (or
    local start time).  Only available for the supplier API.

    Args:
        order_number (str): Order number the item belongs to.
        product_code (str): Product code identifying the session.
        start_time (Union[Unset, None, str]): Session start (UTC).
        start_time_local (Union[Unset, None, str]): Session start (local).
        checkin (Union[Unset, None, bool]): Check-in flag.

    Returns:
        Response[ResponseNoData]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    http_response = httpx.request(verify=client.verify_ssl, **request_kwargs)
    return _build_response(response=http_response)
def sync(
    *,
    client: Client,
    order_number: str,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseNoData]:
    """Remove order item check-in.

    The targeted session is identified by product code and start time (or
    local start time).  Only available for the supplier API.

    Args:
        order_number (str): Order number the item belongs to.
        product_code (str): Product code identifying the session.
        start_time (Union[Unset, None, str]): Session start (UTC).
        start_time_local (Union[Unset, None, str]): Session start (local).
        checkin (Union[Unset, None, bool]): Check-in flag.

    Returns:
        Optional[ResponseNoData]: Parsed response body, or None.
    """
    detailed_response = sync_detailed(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    order_number: str,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseNoData]:
    """Remove order item check-in (async, full response).

    The targeted session is identified by product code and start time (or
    local start time).  Only available for the supplier API.

    Args:
        order_number (str): Order number the item belongs to.
        product_code (str): Product code identifying the session.
        start_time (Union[Unset, None, str]): Session start (UTC).
        start_time_local (Union[Unset, None, str]): Session start (local).
        checkin (Union[Unset, None, bool]): Check-in flag.

    Returns:
        Response[ResponseNoData]: Status, headers, content, parsed body.
    """
    request_kwargs = _get_kwargs(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        http_response = await session.request(**request_kwargs)
    return _build_response(response=http_response)
async def asyncio(
    *,
    client: Client,
    order_number: str,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseNoData]:
    """Remove order item check-in (async).

    The targeted session is identified by product code and start time (or
    local start time).  Only available for the supplier API.

    Args:
        order_number (str): Order number the item belongs to.
        product_code (str): Product code identifying the session.
        start_time (Union[Unset, None, str]): Session start (UTC).
        start_time_local (Union[Unset, None, str]): Session start (local).
        checkin (Union[Unset, None, bool]): Check-in flag.

    Returns:
        Optional[ResponseNoData]: Parsed response body, or None.
    """
    # NOTE(review): the original final line carried non-Python dataset
    # residue after ``.parsed``; it has been removed so the module parses.
    detailed_response = await asyncio_detailed(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_no_data import ResponseNoData
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Dict[str, Any]:
    """Assemble the httpx request arguments for ``PUT /manifest/checkinSession``."""
    raw_params: Dict[str, Any] = {
        "productCode": product_code,
        "startTime": start_time,
        "startTimeLocal": start_time_local,
        "checkin": checkin,
    }
    # Drop query parameters the caller left unset or explicitly None.
    query_params = {
        name: value
        for name, value in raw_params.items()
        if value is not UNSET and value is not None
    }
    return {
        "method": "put",
        "url": "{}/manifest/checkinSession".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseNoData]:
    """Deserialize a 200 response body; any other status parses to None."""
    if response.status_code != 200:
        return None
    return ResponseNoData.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseNoData]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseNoData]:
    """Check-in session

    Store a Check-in / No show flag for everyone in the session identified by
    product code and start time (or start time local). Supplier API only.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    raw_response = httpx.request(
        verify=client.verify_ssl,
        **request_kwargs,
    )
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseNoData]:
    """Check-in session

    Store a Check-in / No show flag for everyone in the session identified by
    product code and start time (or start time local). Supplier API only.
    Convenience wrapper returning only the parsed body.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    detailed_response = sync_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseNoData]:
    """Check-in session (async)

    Store a Check-in / No show flag for everyone in the session identified by
    product code and start time (or start time local). Supplier API only.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as async_client:
        raw_response = await async_client.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseNoData]:
    """Check-in session (async)

    Store a Check-in / No show flag for everyone in the session identified by
    product code and start time (or start time local). Supplier API only.
    Convenience wrapper returning only the parsed body.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    # Delegate to asyncio_detailed and surface only the parsed payload.
    detailed_response = await asyncio_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_check_in import ResponseCheckIn
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Dict[str, Any]:
    """Assemble httpx request kwargs for GET /manifest/checkinSession."""
    raw_params: Dict[str, Any] = {
        "productCode": product_code,
        "startTime": start_time,
        "startTimeLocal": start_time_local,
    }
    # Drop query parameters that were never supplied (UNSET) or are explicit None.
    query_params = {key: value for key, value in raw_params.items() if value is not UNSET and value is not None}
    return {
        "method": "get",
        "url": "{}/manifest/checkinSession".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseCheckIn]:
    """Deserialize a 200 body into ResponseCheckIn; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseCheckIn.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseCheckIn]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Response[ResponseCheckIn]:
    """Get session check-in status

    Retrieve the Check-in status: whether everyone in the session was checked
    in. The session is identified by product code and start time (or start
    time local). Supplier API only.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    raw_response = httpx.request(
        verify=client.verify_ssl,
        **request_kwargs,
    )
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseCheckIn]:
    """Get session check-in status

    Retrieve the Check-in status: whether everyone in the session was checked
    in. The session is identified by product code and start time (or start
    time local). Supplier API only. Returns only the parsed body.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    detailed_response = sync_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Response[ResponseCheckIn]:
    """Get session check-in status (async)

    Retrieve the Check-in status: whether everyone in the session was checked
    in. The session is identified by product code and start time (or start
    time local). Supplier API only.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as async_client:
        raw_response = await async_client.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseCheckIn]:
    """Get session check-in status (async)

    Retrieve the Check-in status: whether everyone in the session was checked
    in. The session is identified by product code and start time (or start
    time local). Supplier API only. Returns only the parsed body.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    # Delegate to asyncio_detailed and surface only the parsed payload.
    detailed_response = await asyncio_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_no_data import ResponseNoData
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Dict[str, Any]:
    """Assemble httpx request kwargs for DELETE /manifest/checkinSession."""
    raw_params: Dict[str, Any] = {
        "productCode": product_code,
        "startTime": start_time,
        "startTimeLocal": start_time_local,
    }
    # Drop query parameters that were never supplied (UNSET) or are explicit None.
    query_params = {key: value for key, value in raw_params.items() if value is not UNSET and value is not None}
    return {
        "method": "delete",
        "url": "{}/manifest/checkinSession".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseNoData]:
    """Deserialize a 200 body into ResponseNoData; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseNoData.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseNoData]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Response[ResponseNoData]:
    """Remove session check-in

    Remove the Check-in / No show flag from everyone in the session identified
    by product code and start time (or start time local). Supplier API only.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseNoData]
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    raw_response = httpx.request(
        verify=client.verify_ssl,
        **request_kwargs,
    )
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseNoData]:
    """Remove session check-in

    Remove the Check-in / No show flag from everyone in the session identified
    by product code and start time (or start time local). Supplier API only.
    Returns only the parsed body.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseNoData]
    """
    detailed_response = sync_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Response[ResponseNoData]:
    """Remove session check-in (async)

    Remove the Check-in / No show flag from everyone in the session identified
    by product code and start time (or start time local). Supplier API only.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseNoData]
    """
    request_kwargs = _get_kwargs(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as async_client:
        raw_response = await async_client.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseNoData]:
    """Remove session check-in (async)

    Remove the Check-in / No show flag from everyone in the session identified
    by product code and start time (or start time local). Supplier API only.
    Returns only the parsed body.

    Args:
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseNoData]
    """
    # Delegate to asyncio_detailed and surface only the parsed payload.
    detailed_response = await asyncio_detailed(
        client=client,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_check_in import ResponseCheckIn
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Dict[str, Any]:
    """Assemble httpx request kwargs for GET /manifest/checkinOrderSession."""
    raw_params: Dict[str, Any] = {
        "orderNumber": order_number,
        "productCode": product_code,
        "startTime": start_time,
        "startTimeLocal": start_time_local,
    }
    # Drop query parameters that were never supplied (UNSET) or are explicit None.
    query_params = {key: value for key, value in raw_params.items() if value is not UNSET and value is not None}
    return {
        "method": "get",
        "url": "{}/manifest/checkinOrderSession".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseCheckIn]:
    """Deserialize a 200 body into ResponseCheckIn; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseCheckIn.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseCheckIn]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Response[ResponseCheckIn]:
    """Get order item check-in status

    Retrieve the Check-in status for an order item. The session is identified
    by product code and start time (or start time local). Supplier API only.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    request_kwargs = _get_kwargs(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    raw_response = httpx.request(
        verify=client.verify_ssl,
        **request_kwargs,
    )
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseCheckIn]:
    """Get order item check-in status

    Retrieve the Check-in status for an order item. The session is identified
    by product code and start time (or start time local). Supplier API only.
    Returns only the parsed body.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    detailed_response = sync_detailed(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Response[ResponseCheckIn]:
    """Get order item check-in status (async)

    Retrieve the Check-in status for an order item. The session is identified
    by product code and start time (or start time local). Supplier API only.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    request_kwargs = _get_kwargs(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as async_client:
        raw_response = await async_client.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
) -> Optional[ResponseCheckIn]:
    """Get order item check-in status (async)

    Retrieve the Check-in status for an order item. The session is identified
    by product code and start time (or start time local). Supplier API only.
    Returns only the parsed body.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):

    Returns:
        Response[ResponseCheckIn]
    """
    # Delegate to asyncio_detailed and surface only the parsed payload.
    detailed_response = await asyncio_detailed(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
    )
    return detailed_response.parsed
from typing import Any, Dict, Optional, Union
import httpx
from ...client import Client
from ...models.response_no_data import ResponseNoData
from ...types import UNSET, Response, Unset
def _get_kwargs(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Dict[str, Any]:
    """Assemble httpx request kwargs for PUT /manifest/checkinOrderSession."""
    raw_params: Dict[str, Any] = {
        "orderNumber": order_number,
        "productCode": product_code,
        "startTime": start_time,
        "startTimeLocal": start_time_local,
        "checkin": checkin,
    }
    # Drop query parameters that were never supplied (UNSET) or are explicit None.
    query_params = {key: value for key, value in raw_params.items() if value is not UNSET and value is not None}
    return {
        "method": "put",
        "url": "{}/manifest/checkinOrderSession".format(client.base_url),
        "headers": client.get_headers(),
        "cookies": client.get_cookies(),
        "timeout": client.get_timeout(),
        "params": query_params,
    }
def _parse_response(*, response: httpx.Response) -> Optional[ResponseNoData]:
    """Deserialize a 200 body into ResponseNoData; any other status yields None."""
    if response.status_code != 200:
        return None
    return ResponseNoData.from_dict(response.json())
def _build_response(*, response: httpx.Response) -> Response[ResponseNoData]:
    """Wrap the raw httpx response together with its parsed payload."""
    parsed = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed,
    )
def sync_detailed(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseNoData]:
    """Check-in order item

    Place a Check-in / No show flag on the order item identified by order
    number, product code and start time (or start time local). Supplier API
    only.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    request_kwargs = _get_kwargs(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    raw_response = httpx.request(
        verify=client.verify_ssl,
        **request_kwargs,
    )
    return _build_response(response=raw_response)
def sync(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseNoData]:
    """Check-in order item

    Place a Check-in / No show flag on the order item identified by order
    number, product code and start time (or start time local). Supplier API
    only. Returns only the parsed body.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    detailed_response = sync_detailed(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    return detailed_response.parsed
async def asyncio_detailed(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Response[ResponseNoData]:
    """Check-in order item (async)

    Place a Check-in / No show flag on the order item identified by order
    number, product code and start time (or start time local). Supplier API
    only.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    request_kwargs = _get_kwargs(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as async_client:
        raw_response = await async_client.request(**request_kwargs)
    return _build_response(response=raw_response)
async def asyncio(
    *,
    client: Client,
    order_number: Union[Unset, None, str] = UNSET,
    product_code: str,
    start_time: Union[Unset, None, str] = UNSET,
    start_time_local: Union[Unset, None, str] = UNSET,
    checkin: Union[Unset, None, bool] = UNSET,
) -> Optional[ResponseNoData]:
    """Check-in order item (async)

    Place a Check-in / No show flag on the order item identified by order
    number, product code and start time (or start time local). Supplier API
    only. Returns only the parsed body.

    Args:
        order_number (Union[Unset, None, str]):
        product_code (str):
        start_time (Union[Unset, None, str]):
        start_time_local (Union[Unset, None, str]):
        checkin (Union[Unset, None, bool]):

    Returns:
        Response[ResponseNoData]
    """
    # Delegate to asyncio_detailed and surface only the parsed payload.
    detailed_response = await asyncio_detailed(
        client=client,
        order_number=order_number,
        product_code=product_code,
        start_time=start_time,
        start_time_local=start_time_local,
        checkin=checkin,
    )
    return detailed_response.parsed
import datetime
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from dateutil.parser import isoparse
from ..models.price_option import PriceOption
from ..types import UNSET, Unset
T = TypeVar("T", bound="SessionCreateRequest")
@attr.s(auto_attribs=True)
class SessionCreateRequest:
    """Create session request data.

    Attributes:
        product_code (str): Rezdy unique productCode linked to this session
        seats (int): Total number of seats for this session. Does not change after a booking is made
        all_day (Union[Unset, bool]): If true, this session lasts all day and no time should be shown to customers.
            Technically the session will be from midnight to midnight.
        end_time (Union[Unset, datetime.datetime]): End time of this session. Used to show the customer how long that
            tour will last
        end_time_local (Union[Unset, str]): End time of this session in supplier's local timezone. Used to show the
            customer how long that tour will last
        price_options (Union[Unset, List[PriceOption]]): List of price options attached to this session. Most of the
            time they'll match the product's price options, but they can be used to change the price of specific
            dates/times (I.e. high/low season, weekday/weekend, etc.)
        start_time (Union[Unset, datetime.datetime]): Start Time of this session. This time should be used when showing
            customers the booking date/time. It should be sent in BookingItem.startTime when making new bookings
        start_time_local (Union[Unset, str]): Start Time of this session in supplier's local timezone. This time should
            be used when showing customers the booking date/time. It should be sent in BookingItem.startTimeLocal when
            making new bookings
    """

    product_code: str
    seats: int
    all_day: Union[Unset, bool] = UNSET
    end_time: Union[Unset, datetime.datetime] = UNSET
    end_time_local: Union[Unset, str] = UNSET
    price_options: Union[Unset, List[PriceOption]] = UNSET
    start_time: Union[Unset, datetime.datetime] = UNSET
    start_time_local: Union[Unset, str] = UNSET
    # Extra JSON keys not declared above are preserved round-trip here.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict, omitting UNSET optional fields."""
        product_code = self.product_code
        seats = self.seats
        all_day = self.all_day
        # Datetimes are serialized as ISO-8601 strings.
        end_time: Union[Unset, str] = UNSET
        if not isinstance(self.end_time, Unset):
            end_time = self.end_time.isoformat()
        end_time_local = self.end_time_local
        price_options: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.price_options, Unset):
            price_options = []
            for price_options_item_data in self.price_options:
                price_options_item = price_options_item_data.to_dict()
                price_options.append(price_options_item)
        start_time: Union[Unset, str] = UNSET
        if not isinstance(self.start_time, Unset):
            start_time = self.start_time.isoformat()
        start_time_local = self.start_time_local

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "productCode": product_code,
                "seats": seats,
            }
        )
        if all_day is not UNSET:
            field_dict["allDay"] = all_day
        if end_time is not UNSET:
            field_dict["endTime"] = end_time
        if end_time_local is not UNSET:
            field_dict["endTimeLocal"] = end_time_local
        if price_options is not UNSET:
            field_dict["priceOptions"] = price_options
        if start_time is not UNSET:
            field_dict["startTime"] = start_time
        if start_time_local is not UNSET:
            field_dict["startTimeLocal"] = start_time_local
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a JSON dict; unknown keys become additional_properties."""
        d = src_dict.copy()
        product_code = d.pop("productCode")
        seats = d.pop("seats")
        all_day = d.pop("allDay", UNSET)
        _end_time = d.pop("endTime", UNSET)
        end_time: Union[Unset, datetime.datetime]
        if isinstance(_end_time, Unset):
            end_time = UNSET
        else:
            end_time = isoparse(_end_time)
        end_time_local = d.pop("endTimeLocal", UNSET)
        price_options = []
        _price_options = d.pop("priceOptions", UNSET)
        for price_options_item_data in _price_options or []:
            price_options_item = PriceOption.from_dict(price_options_item_data)
            price_options.append(price_options_item)
        _start_time = d.pop("startTime", UNSET)
        start_time: Union[Unset, datetime.datetime]
        if isinstance(_start_time, Unset):
            start_time = UNSET
        else:
            start_time = isoparse(_start_time)
        start_time_local = d.pop("startTimeLocal", UNSET)
        session_create_request = cls(
            product_code=product_code,
            seats=seats,
            all_day=all_day,
            end_time=end_time,
            end_time_local=end_time_local,
            price_options=price_options,
            start_time=start_time,
            start_time_local=start_time_local,
        )
        session_create_request.additional_properties = d
        return session_create_request

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.rate import Rate
from ..models.request_status import RequestStatus
from ..types import UNSET, Unset
T = TypeVar("T", bound="ResponseRateList")
@attr.s(auto_attribs=True)
class ResponseRateList:
    """API envelope pairing a request status with a list of rates.

    Attributes:
        request_status (RequestStatus):
        rates (Union[Unset, List[Rate]]):
    """

    request_status: RequestStatus
    rates: Union[Unset, List[Rate]] = UNSET
    # Extra JSON keys not declared above are preserved round-trip here.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict, omitting UNSET optional fields."""
        request_status = self.request_status.to_dict()
        rates: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.rates, Unset):
            rates = []
            for rates_item_data in self.rates:
                rates_item = rates_item_data.to_dict()
                rates.append(rates_item)

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "requestStatus": request_status,
            }
        )
        if rates is not UNSET:
            field_dict["rates"] = rates
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a JSON dict; unknown keys become additional_properties."""
        d = src_dict.copy()
        request_status = RequestStatus.from_dict(d.pop("requestStatus"))
        rates = []
        _rates = d.pop("rates", UNSET)
        for rates_item_data in _rates or []:
            rates_item = Rate.from_dict(rates_item_data)
            rates.append(rates_item)
        response_rate_list = cls(
            request_status=request_status,
            rates=rates,
        )
        response_rate_list.additional_properties = d
        return response_rate_list

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.booking import Booking
from ..models.request_status import RequestStatus
from ..types import UNSET, Unset
T = TypeVar("T", bound="ResponseBookingList")
@attr.s(auto_attribs=True)
class ResponseBookingList:
    """API envelope pairing a request status with a list of bookings.

    Attributes:
        request_status (RequestStatus):
        bookings (Union[Unset, List[Booking]]):
    """

    request_status: RequestStatus
    bookings: Union[Unset, List[Booking]] = UNSET
    # Extra JSON keys not declared above are preserved round-trip here.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-ready dict, omitting UNSET optional fields."""
        request_status = self.request_status.to_dict()
        bookings: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.bookings, Unset):
            bookings = []
            for bookings_item_data in self.bookings:
                bookings_item = bookings_item_data.to_dict()
                bookings.append(bookings_item)

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "requestStatus": request_status,
            }
        )
        if bookings is not UNSET:
            field_dict["bookings"] = bookings
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a JSON dict; unknown keys become additional_properties."""
        d = src_dict.copy()
        request_status = RequestStatus.from_dict(d.pop("requestStatus"))
        bookings = []
        _bookings = d.pop("bookings", UNSET)
        for bookings_item_data in _bookings or []:
            bookings_item = Booking.from_dict(bookings_item_data)
            bookings.append(bookings_item)
        response_booking_list = cls(
            request_status=request_status,
            bookings=bookings,
        )
        response_booking_list.additional_properties = d
        return response_booking_list

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.error import Error
from ..models.warning import Warning_
from ..types import UNSET, Unset
T = TypeVar("T", bound="RequestStatus")
@attr.s(auto_attribs=True)
class RequestStatus:
    """Outcome of an API request.

    Attributes:
        success (bool):
        error (Union[Unset, Error]):
        version (Union[Unset, str]):
        warning (Union[Unset, Warning_]):
    """

    success: bool
    error: Union[Unset, Error] = UNSET
    version: Union[Unset, str] = UNSET
    warning: Union[Unset, Warning_] = UNSET
    # Catch-all for response keys not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; optional fields are omitted when unset."""
        success = self.success
        error: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.error, Unset):
            error = self.error.to_dict()
        version = self.version
        warning: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.warning, Unset):
            warning = self.warning.to_dict()
        field_dict: Dict[str, Any] = {}
        # Unknown properties first so declared fields overwrite any clashes.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "success": success,
            }
        )
        if error is not UNSET:
            field_dict["error"] = error
        if version is not UNSET:
            field_dict["version"] = version
        if warning is not UNSET:
            field_dict["warning"] = warning
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        d = src_dict.copy()
        success = d.pop("success")
        _error = d.pop("error", UNSET)
        error: Union[Unset, Error]
        if isinstance(_error, Unset):
            error = UNSET
        else:
            error = Error.from_dict(_error)
        version = d.pop("version", UNSET)
        _warning = d.pop("warning", UNSET)
        warning: Union[Unset, Warning_]
        if isinstance(_warning, Unset):
            warning = UNSET
        else:
            warning = Warning_.from_dict(_warning)
        request_status = cls(
            success=success,
            error=error,
            version=version,
            warning=warning,
        )
        request_status.additional_properties = d
        return request_status

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared extra properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties | /rezdy_api_for_suppliers_client-1.0.1.tar.gz/rezdy_api_for_suppliers_client-1.0.1/rezdy_api_for_suppliers_client/models/request_status.py | 0.728555 | 0.168994 | request_status.py | pypi |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.extra_create_request_extra_price_type import ExtraCreateRequestExtraPriceType
from ..types import UNSET, Unset
T = TypeVar("T", bound="ExtraCreateRequest")
@attr.s(auto_attribs=True)
class ExtraCreateRequest:
    """Partial optional service or item that can be purchased when booking a specific product.

    Attributes:
        description (Union[Unset, str]): Description of the extra
        extra_price_type (Union[Unset, ExtraCreateRequestExtraPriceType]): Price type for this extra.
            Defines what quantities are allowed and how their price is calculated
        name (Union[Unset, str]): Name of the extra
        price (Union[Unset, float]): Price for a single quantity of this extra
    """

    description: Union[Unset, str] = UNSET
    extra_price_type: Union[Unset, ExtraCreateRequestExtraPriceType] = UNSET
    name: Union[Unset, str] = UNSET
    price: Union[Unset, float] = UNSET
    # Catch-all for request keys not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting every unset optional field."""
        serialized_type: Union[Unset, str] = UNSET
        if not isinstance(self.extra_price_type, Unset):
            serialized_type = self.extra_price_type.value

        # Unknown properties go in first so declared fields win on key clashes.
        payload: Dict[str, Any] = dict(self.additional_properties)
        optional_fields = (
            ("description", self.description),
            ("extraPriceType", serialized_type),
            ("name", self.name),
            ("price", self.price),
        )
        for json_key, value in optional_fields:
            if value is not UNSET:
                payload[json_key] = value
        return payload

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        data = src_dict.copy()

        raw_type = data.pop("extraPriceType", UNSET)
        price_type: Union[Unset, ExtraCreateRequestExtraPriceType]
        if isinstance(raw_type, Unset):
            price_type = UNSET
        else:
            price_type = ExtraCreateRequestExtraPriceType(raw_type)

        extra = cls(
            description=data.pop("description", UNSET),
            extra_price_type=price_type,
            name=data.pop("name", UNSET),
            price=data.pop("price", UNSET),
        )
        extra.additional_properties = data
        return extra

    @property
    def additional_keys(self) -> List[str]:
        """Names of all properties not modeled as typed attributes."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties | /rezdy_api_for_suppliers_client-1.0.1.tar.gz/rezdy_api_for_suppliers_client-1.0.1/rezdy_api_for_suppliers_client/models/extra_create_request.py | 0.842879 | 0.289196 | extra_create_request.py | pypi |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.credit_card_card_type import CreditCardCardType
from ..types import UNSET, Unset
T = TypeVar("T", bound="CreditCard")
@attr.s(auto_attribs=True)
class CreditCard:
    """Credit card details.<p>Used to send payment details for a booking</p>

    Attributes:
        card_country (Union[Unset, str]):
        card_name (Union[Unset, str]): Full name on the credit card
        card_number (Union[Unset, str]): Full Credit card number, no spaces or special characters
        card_security_number (Union[Unset, str]): Credit Card CVV / CVN
        card_token (Union[Unset, str]): Credit card Token generated by client libraries. Use instead of other card
            attributes for supported gateways.<br>For example when using Stripe.js, only send the cardToken attribute. Do
            not send cardNumber or any other attribute.
        card_type (Union[Unset, CreditCardCardType]): Type of card
        expiry_month (Union[Unset, str]): <ul>Expiry month of the card<li>01 : January</li><li>02 : February</li><li>03
            : March</li><li>04 : April</li><li>05 : May</li><li>06 : June</li><li>07 : July</li><li>08 : August</li><li>09 :
            September</li><li>10 : October</li><li>11 : November</li><li>12 : December</li></ul>
        expiry_year (Union[Unset, str]): Expiry year of the card in 4 digits<br>(i.e 2015 / 2016 ...)
    """

    card_country: Union[Unset, str] = UNSET
    card_name: Union[Unset, str] = UNSET
    card_number: Union[Unset, str] = UNSET
    card_security_number: Union[Unset, str] = UNSET
    card_token: Union[Unset, str] = UNSET
    card_type: Union[Unset, CreditCardCardType] = UNSET
    expiry_month: Union[Unset, str] = UNSET
    expiry_year: Union[Unset, str] = UNSET
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; every field is optional and omitted when unset."""
        card_country = self.card_country
        card_name = self.card_name
        card_number = self.card_number
        card_security_number = self.card_security_number
        card_token = self.card_token
        # Enum values are serialized as their raw string value.
        card_type: Union[Unset, str] = UNSET
        if not isinstance(self.card_type, Unset):
            card_type = self.card_type.value
        expiry_month = self.expiry_month
        expiry_year = self.expiry_year
        field_dict: Dict[str, Any] = {}
        # Unknown properties first so declared fields overwrite any clashes.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if card_country is not UNSET:
            field_dict["cardCountry"] = card_country
        if card_name is not UNSET:
            field_dict["cardName"] = card_name
        if card_number is not UNSET:
            field_dict["cardNumber"] = card_number
        if card_security_number is not UNSET:
            field_dict["cardSecurityNumber"] = card_security_number
        if card_token is not UNSET:
            field_dict["cardToken"] = card_token
        if card_type is not UNSET:
            field_dict["cardType"] = card_type
        if expiry_month is not UNSET:
            field_dict["expiryMonth"] = expiry_month
        if expiry_year is not UNSET:
            field_dict["expiryYear"] = expiry_year
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        d = src_dict.copy()
        card_country = d.pop("cardCountry", UNSET)
        card_name = d.pop("cardName", UNSET)
        card_number = d.pop("cardNumber", UNSET)
        card_security_number = d.pop("cardSecurityNumber", UNSET)
        card_token = d.pop("cardToken", UNSET)
        _card_type = d.pop("cardType", UNSET)
        card_type: Union[Unset, CreditCardCardType]
        if isinstance(_card_type, Unset):
            card_type = UNSET
        else:
            card_type = CreditCardCardType(_card_type)
        expiry_month = d.pop("expiryMonth", UNSET)
        expiry_year = d.pop("expiryYear", UNSET)
        credit_card = cls(
            card_country=card_country,
            card_name=card_name,
            card_number=card_number,
            card_security_number=card_security_number,
            card_token=card_token,
            card_type=card_type,
            expiry_month=expiry_month,
            expiry_year=expiry_year,
        )
        credit_card.additional_properties = d
        return credit_card

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared extra properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties | /rezdy_api_for_suppliers_client-1.0.1.tar.gz/rezdy_api_for_suppliers_client-1.0.1/rezdy_api_for_suppliers_client/models/credit_card.py | 0.829871 | 0.198958 | credit_card.py | pypi |
import json
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.form_data_content_disposition import FormDataContentDisposition
from ..types import UNSET, Unset
T = TypeVar("T", bound="AddProductImageMultipartData")
@attr.s(auto_attribs=True)
class AddProductImageMultipartData:
    """Multipart form payload for uploading a product image.

    Attributes:
        file (FormDataContentDisposition):
        filename (Union[Unset, str]):
    """

    file: FormDataContentDisposition
    filename: Union[Unset, str] = UNSET
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; ``filename`` is omitted when unset."""
        file = self.file.to_dict()
        filename = self.filename
        field_dict: Dict[str, Any] = {}
        # Unknown properties first so declared fields overwrite any clashes.
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "file": file,
            }
        )
        if filename is not UNSET:
            field_dict["filename"] = filename
        return field_dict

    def to_multipart(self) -> Dict[str, Any]:
        """Serialize to multipart form parts as (filename, payload, content-type) tuples."""
        file = (None, json.dumps(self.file.to_dict()).encode(), "application/json")
        # An Unset filename is passed through so the UNSET check below can skip it.
        filename = (
            self.filename if isinstance(self.filename, Unset) else (None, str(self.filename).encode(), "text/plain")
        )
        field_dict: Dict[str, Any] = {}
        # Extra properties are sent as plain-text parts.
        field_dict.update(
            {key: (None, str(value).encode(), "text/plain") for key, value in self.additional_properties.items()}
        )
        field_dict.update(
            {
                "file": file,
            }
        )
        if filename is not UNSET:
            field_dict["filename"] = filename
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        d = src_dict.copy()
        file = FormDataContentDisposition.from_dict(d.pop("file"))
        filename = d.pop("filename", UNSET)
        add_product_image_multipart_data = cls(
            file=file,
            filename=filename,
        )
        add_product_image_multipart_data.additional_properties = d
        return add_product_image_multipart_data

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared extra properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties | /rezdy_api_for_suppliers_client-1.0.1.tar.gz/rezdy_api_for_suppliers_client-1.0.1/rezdy_api_for_suppliers_client/models/add_product_image_multipart_data.py | 0.678966 | 0.151749 | add_product_image_multipart_data.py | pypi |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.price_option_price_group_type import PriceOptionPriceGroupType
from ..types import UNSET, Unset
T = TypeVar("T", bound="PriceOption")
@attr.s(auto_attribs=True)
class PriceOption:
    """A Price Option belongs to a product or products session. It holds the price details for a specific price
    type. Products can have one or many price options (I.e. Adult, Child, Family, etc.)

    Attributes:
        id (Union[Unset, int]):
        label (Union[Unset, str]): Label for this price (I.e. "Adult", "Child")
        max_quantity (Union[Unset, int]): Max booking quantity for the product price option. Can be specified, if the
            price option is fixed or a group type. For a successful booking of the product, the number of participants
            for this price option have to be lesser or equal than this value.
        min_quantity (Union[Unset, int]): Min booking quantity for the product price option. Can be specified, if the
            price option is fixed or a group type. For a successful booking of the product, the number of participants
            for this price option have to be greater or equal than this value.
        price (Union[Unset, float]): Price amount (I.e. "200.00")
        price_group_type (Union[Unset, PriceOptionPriceGroupType]): If this price is for a group, is the price for the
            whole group (TOTAL) or per quantity (EACH)
        product_code (Union[Unset, str]): Product code to which the price options belongs to. Since Rezdy introduced
            shared availability option for products, the product sessions can contain price overrides for all of the
            products, which share the sessions. Therefore it is necessary to filter only the price options matching the
            chosen product code on the client side, when processing /availability service responses.
        seats_used (Union[Unset, int]): How many seats one quantity of this price will use. Used for availability
            calculations. For example 1 quantity of "Family of 4" will use 4 seats.
    """

    id: Union[Unset, int] = UNSET
    label: Union[Unset, str] = UNSET
    max_quantity: Union[Unset, int] = UNSET
    min_quantity: Union[Unset, int] = UNSET
    price: Union[Unset, float] = UNSET
    price_group_type: Union[Unset, PriceOptionPriceGroupType] = UNSET
    product_code: Union[Unset, str] = UNSET
    seats_used: Union[Unset, int] = UNSET
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; every field is optional and omitted when unset."""
        id = self.id
        label = self.label
        max_quantity = self.max_quantity
        min_quantity = self.min_quantity
        price = self.price
        # Enum values are serialized as their raw string value.
        price_group_type: Union[Unset, str] = UNSET
        if not isinstance(self.price_group_type, Unset):
            price_group_type = self.price_group_type.value
        product_code = self.product_code
        seats_used = self.seats_used
        field_dict: Dict[str, Any] = {}
        # Unknown properties first so declared fields overwrite any clashes.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if id is not UNSET:
            field_dict["id"] = id
        if label is not UNSET:
            field_dict["label"] = label
        if max_quantity is not UNSET:
            field_dict["maxQuantity"] = max_quantity
        if min_quantity is not UNSET:
            field_dict["minQuantity"] = min_quantity
        if price is not UNSET:
            field_dict["price"] = price
        if price_group_type is not UNSET:
            field_dict["priceGroupType"] = price_group_type
        if product_code is not UNSET:
            field_dict["productCode"] = product_code
        if seats_used is not UNSET:
            field_dict["seatsUsed"] = seats_used
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        d = src_dict.copy()
        id = d.pop("id", UNSET)
        label = d.pop("label", UNSET)
        max_quantity = d.pop("maxQuantity", UNSET)
        min_quantity = d.pop("minQuantity", UNSET)
        price = d.pop("price", UNSET)
        _price_group_type = d.pop("priceGroupType", UNSET)
        price_group_type: Union[Unset, PriceOptionPriceGroupType]
        if isinstance(_price_group_type, Unset):
            price_group_type = UNSET
        else:
            price_group_type = PriceOptionPriceGroupType(_price_group_type)
        product_code = d.pop("productCode", UNSET)
        seats_used = d.pop("seatsUsed", UNSET)
        price_option = cls(
            id=id,
            label=label,
            max_quantity=max_quantity,
            min_quantity=min_quantity,
            price=price,
            price_group_type=price_group_type,
            product_code=product_code,
            seats_used=seats_used,
        )
        price_option.additional_properties = d
        return price_option

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared extra properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties | /rezdy_api_for_suppliers_client-1.0.1.tar.gz/rezdy_api_for_suppliers_client-1.0.1/rezdy_api_for_suppliers_client/models/price_option.py | 0.850934 | 0.469338 | price_option.py | pypi |
import datetime
from typing import Any, Dict, List, Type, TypeVar, Union, cast
import attr
from dateutil.parser import isoparse
from ..models.booking_create_barcode_type import BookingCreateBarcodeType
from ..models.booking_create_payment_option import BookingCreatePaymentOption
from ..models.booking_create_reseller_source import BookingCreateResellerSource
from ..models.booking_create_source import BookingCreateSource
from ..models.booking_create_status import BookingCreateStatus
from ..models.booking_field import BookingField
from ..models.booking_item_create import BookingItemCreate
from ..models.booking_payment import BookingPayment
from ..models.credit_card import CreditCard
from ..models.customer import Customer
from ..models.user import User
from ..types import UNSET, Unset
T = TypeVar("T", bound="BookingCreate")
@attr.s(auto_attribs=True)
class BookingCreate:
    """Booking create object used to create a booking in Rezdy's system. Lists all the possible fields for all product
    types and scenarios. Most of them are not required when sending a new booking.<br>A single Booking can be used to
    book multiple products, each of them being a BookingItem. All the products of one booking have to be from the same
    supplier.

    Attributes:
        barcode_type (Union[Unset, BookingCreateBarcodeType]): Declares the redemption code format customers will
            receive if the booking was created with barcodes.
        comments (Union[Unset, str]): Special requirements entered by the customer. Visible to both customer and
            supplier.
        coupon (Union[Unset, str]): Promo code that has been applied to this booking
        credit_card (Union[Unset, CreditCard]): Credit card details.<p>Used to send payment details for a booking</p>
        customer (Union[Unset, Customer]): The customer is the person making the booking, and most of the time paying
            for it.<br>It differs from Participants, who are the people attending a tour
        date_paid (Union[Unset, datetime.datetime]): Date this booking was fully paid
        date_reconciled (Union[Unset, datetime.datetime]): Date this booking was reconciled with the agent
        fields (Union[Unset, List[BookingField]]): List of custom fields that are required "once per booking" by all
            the products in this booking
        internal_notes (Union[Unset, str]): Comments only visible internally by the supplier
        items (Union[Unset, List[BookingItemCreate]]): List of items in this booking. A booking can contain multiple
            products. Each BookingItem is a separate product with its own set of quantities and participant details.
        order_number (Union[Unset, str]): Order number. This is the number you should give to customers and print on
            booking confirmations. Order number is generated by the system, therefore, even if it is specified in the
            booking request, it will be overwritten.
        payment_option (Union[Unset, BookingCreatePaymentOption]): Payment option selected by the customer when making
            an online booking
        payments (Union[Unset, List[BookingPayment]]): List of payments recorded for this booking
        reseller_comments (Union[Unset, str]): Comments only visible by the agent and the supplier. This should be
            used by the agent to send voucher numbers/redemption codes to suppliers.
        reseller_id (Union[Unset, int]): Rezdy internal ID of the agent company attached to this booking
        reseller_reference (Union[Unset, str]): External reseller reference, can be used to pass internal booking
            number. This reference will be shown to a supplier, also it will appear on reports and can be used to filter
            orders. Maximum number of characters is 30
        reseller_source (Union[Unset, BookingCreateResellerSource]): Source of this booking viewed from the agent
        reseller_user (Union[Unset, User]): Internal Rezdy user details. This is a Rezdy application user who belongs
            to a Rezdy agent or supplier company.
        send_notifications (Union[Unset, bool]): Flag to control if a booking confirmation email should be send to the
            customer after this booking is created.<br>This will also send other types of customer notifications when
            setup by the supplier (I.e. SMS, Gift cards) Default: True.
        source (Union[Unset, BookingCreateSource]): Source of this booking viewed from the supplier
        source_channel (Union[Unset, str]): Agent code defined by the supplier
        source_referrer (Union[Unset, str]): Referrer code
        status (Union[Unset, BookingCreateStatus]): Status of this booking
        surcharge (Union[Unset, float]): Credit card surcharge calculated for this booking
        total_amount (Union[Unset, float]): Total booking amount
        vouchers (Union[Unset, List[str]]): List of vouchers (Gift cards) that have been redeemed to pay for this
            booking
    """

    barcode_type: Union[Unset, BookingCreateBarcodeType] = UNSET
    comments: Union[Unset, str] = UNSET
    coupon: Union[Unset, str] = UNSET
    credit_card: Union[Unset, CreditCard] = UNSET
    customer: Union[Unset, Customer] = UNSET
    date_paid: Union[Unset, datetime.datetime] = UNSET
    date_reconciled: Union[Unset, datetime.datetime] = UNSET
    fields: Union[Unset, List[BookingField]] = UNSET
    internal_notes: Union[Unset, str] = UNSET
    items: Union[Unset, List[BookingItemCreate]] = UNSET
    order_number: Union[Unset, str] = UNSET
    payment_option: Union[Unset, BookingCreatePaymentOption] = UNSET
    payments: Union[Unset, List[BookingPayment]] = UNSET
    reseller_comments: Union[Unset, str] = UNSET
    reseller_id: Union[Unset, int] = UNSET
    reseller_reference: Union[Unset, str] = UNSET
    reseller_source: Union[Unset, BookingCreateResellerSource] = UNSET
    reseller_user: Union[Unset, User] = UNSET
    send_notifications: Union[Unset, bool] = True
    source: Union[Unset, BookingCreateSource] = UNSET
    source_channel: Union[Unset, str] = UNSET
    source_referrer: Union[Unset, str] = UNSET
    status: Union[Unset, BookingCreateStatus] = UNSET
    surcharge: Union[Unset, float] = UNSET
    total_amount: Union[Unset, float] = UNSET
    vouchers: Union[Unset, List[str]] = UNSET
    # Catch-all for keys not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict using camelCase keys; unset fields are omitted.

        Enums become their raw values, nested models are serialized via their own
        to_dict(), and datetimes are rendered as ISO-8601 strings.
        """
        barcode_type: Union[Unset, str] = UNSET
        if not isinstance(self.barcode_type, Unset):
            barcode_type = self.barcode_type.value
        comments = self.comments
        coupon = self.coupon
        credit_card: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.credit_card, Unset):
            credit_card = self.credit_card.to_dict()
        customer: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.customer, Unset):
            customer = self.customer.to_dict()
        date_paid: Union[Unset, str] = UNSET
        if not isinstance(self.date_paid, Unset):
            date_paid = self.date_paid.isoformat()
        date_reconciled: Union[Unset, str] = UNSET
        if not isinstance(self.date_reconciled, Unset):
            date_reconciled = self.date_reconciled.isoformat()
        fields: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.fields, Unset):
            fields = []
            for fields_item_data in self.fields:
                fields_item = fields_item_data.to_dict()
                fields.append(fields_item)
        internal_notes = self.internal_notes
        items: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.items, Unset):
            items = []
            for items_item_data in self.items:
                items_item = items_item_data.to_dict()
                items.append(items_item)
        order_number = self.order_number
        payment_option: Union[Unset, str] = UNSET
        if not isinstance(self.payment_option, Unset):
            payment_option = self.payment_option.value
        payments: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.payments, Unset):
            payments = []
            for payments_item_data in self.payments:
                payments_item = payments_item_data.to_dict()
                payments.append(payments_item)
        reseller_comments = self.reseller_comments
        reseller_id = self.reseller_id
        reseller_reference = self.reseller_reference
        reseller_source: Union[Unset, str] = UNSET
        if not isinstance(self.reseller_source, Unset):
            reseller_source = self.reseller_source.value
        reseller_user: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.reseller_user, Unset):
            reseller_user = self.reseller_user.to_dict()
        send_notifications = self.send_notifications
        source: Union[Unset, str] = UNSET
        if not isinstance(self.source, Unset):
            source = self.source.value
        source_channel = self.source_channel
        source_referrer = self.source_referrer
        status: Union[Unset, str] = UNSET
        if not isinstance(self.status, Unset):
            status = self.status.value
        surcharge = self.surcharge
        total_amount = self.total_amount
        vouchers: Union[Unset, List[str]] = UNSET
        if not isinstance(self.vouchers, Unset):
            vouchers = self.vouchers
        field_dict: Dict[str, Any] = {}
        # Unknown properties first so declared fields overwrite any clashes.
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if barcode_type is not UNSET:
            field_dict["barcodeType"] = barcode_type
        if comments is not UNSET:
            field_dict["comments"] = comments
        if coupon is not UNSET:
            field_dict["coupon"] = coupon
        if credit_card is not UNSET:
            field_dict["creditCard"] = credit_card
        if customer is not UNSET:
            field_dict["customer"] = customer
        if date_paid is not UNSET:
            field_dict["datePaid"] = date_paid
        if date_reconciled is not UNSET:
            field_dict["dateReconciled"] = date_reconciled
        if fields is not UNSET:
            field_dict["fields"] = fields
        if internal_notes is not UNSET:
            field_dict["internalNotes"] = internal_notes
        if items is not UNSET:
            field_dict["items"] = items
        if order_number is not UNSET:
            field_dict["orderNumber"] = order_number
        if payment_option is not UNSET:
            field_dict["paymentOption"] = payment_option
        if payments is not UNSET:
            field_dict["payments"] = payments
        if reseller_comments is not UNSET:
            field_dict["resellerComments"] = reseller_comments
        if reseller_id is not UNSET:
            field_dict["resellerId"] = reseller_id
        if reseller_reference is not UNSET:
            field_dict["resellerReference"] = reseller_reference
        if reseller_source is not UNSET:
            field_dict["resellerSource"] = reseller_source
        if reseller_user is not UNSET:
            field_dict["resellerUser"] = reseller_user
        if send_notifications is not UNSET:
            field_dict["sendNotifications"] = send_notifications
        if source is not UNSET:
            field_dict["source"] = source
        if source_channel is not UNSET:
            field_dict["sourceChannel"] = source_channel
        if source_referrer is not UNSET:
            field_dict["sourceReferrer"] = source_referrer
        if status is not UNSET:
            field_dict["status"] = status
        if surcharge is not UNSET:
            field_dict["surcharge"] = surcharge
        if total_amount is not UNSET:
            field_dict["totalAmount"] = total_amount
        if vouchers is not UNSET:
            field_dict["vouchers"] = vouchers
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties.

        NOTE: missing list fields deserialize to [] rather than UNSET, so a
        to_dict() round-trip of such fields emits empty lists.
        """
        d = src_dict.copy()
        _barcode_type = d.pop("barcodeType", UNSET)
        barcode_type: Union[Unset, BookingCreateBarcodeType]
        if isinstance(_barcode_type, Unset):
            barcode_type = UNSET
        else:
            barcode_type = BookingCreateBarcodeType(_barcode_type)
        comments = d.pop("comments", UNSET)
        coupon = d.pop("coupon", UNSET)
        _credit_card = d.pop("creditCard", UNSET)
        credit_card: Union[Unset, CreditCard]
        if isinstance(_credit_card, Unset):
            credit_card = UNSET
        else:
            credit_card = CreditCard.from_dict(_credit_card)
        _customer = d.pop("customer", UNSET)
        customer: Union[Unset, Customer]
        if isinstance(_customer, Unset):
            customer = UNSET
        else:
            customer = Customer.from_dict(_customer)
        _date_paid = d.pop("datePaid", UNSET)
        date_paid: Union[Unset, datetime.datetime]
        if isinstance(_date_paid, Unset):
            date_paid = UNSET
        else:
            date_paid = isoparse(_date_paid)
        _date_reconciled = d.pop("dateReconciled", UNSET)
        date_reconciled: Union[Unset, datetime.datetime]
        if isinstance(_date_reconciled, Unset):
            date_reconciled = UNSET
        else:
            date_reconciled = isoparse(_date_reconciled)
        fields = []
        _fields = d.pop("fields", UNSET)
        for fields_item_data in _fields or []:
            fields_item = BookingField.from_dict(fields_item_data)
            fields.append(fields_item)
        internal_notes = d.pop("internalNotes", UNSET)
        items = []
        _items = d.pop("items", UNSET)
        for items_item_data in _items or []:
            items_item = BookingItemCreate.from_dict(items_item_data)
            items.append(items_item)
        order_number = d.pop("orderNumber", UNSET)
        _payment_option = d.pop("paymentOption", UNSET)
        payment_option: Union[Unset, BookingCreatePaymentOption]
        if isinstance(_payment_option, Unset):
            payment_option = UNSET
        else:
            payment_option = BookingCreatePaymentOption(_payment_option)
        payments = []
        _payments = d.pop("payments", UNSET)
        for payments_item_data in _payments or []:
            payments_item = BookingPayment.from_dict(payments_item_data)
            payments.append(payments_item)
        reseller_comments = d.pop("resellerComments", UNSET)
        reseller_id = d.pop("resellerId", UNSET)
        reseller_reference = d.pop("resellerReference", UNSET)
        _reseller_source = d.pop("resellerSource", UNSET)
        reseller_source: Union[Unset, BookingCreateResellerSource]
        if isinstance(_reseller_source, Unset):
            reseller_source = UNSET
        else:
            reseller_source = BookingCreateResellerSource(_reseller_source)
        _reseller_user = d.pop("resellerUser", UNSET)
        reseller_user: Union[Unset, User]
        if isinstance(_reseller_user, Unset):
            reseller_user = UNSET
        else:
            reseller_user = User.from_dict(_reseller_user)
        send_notifications = d.pop("sendNotifications", UNSET)
        _source = d.pop("source", UNSET)
        source: Union[Unset, BookingCreateSource]
        if isinstance(_source, Unset):
            source = UNSET
        else:
            source = BookingCreateSource(_source)
        source_channel = d.pop("sourceChannel", UNSET)
        source_referrer = d.pop("sourceReferrer", UNSET)
        _status = d.pop("status", UNSET)
        status: Union[Unset, BookingCreateStatus]
        if isinstance(_status, Unset):
            status = UNSET
        else:
            status = BookingCreateStatus(_status)
        surcharge = d.pop("surcharge", UNSET)
        total_amount = d.pop("totalAmount", UNSET)
        vouchers = cast(List[str], d.pop("vouchers", UNSET))
        booking_create = cls(
            barcode_type=barcode_type,
            comments=comments,
            coupon=coupon,
            credit_card=credit_card,
            customer=customer,
            date_paid=date_paid,
            date_reconciled=date_reconciled,
            fields=fields,
            internal_notes=internal_notes,
            items=items,
            order_number=order_number,
            payment_option=payment_option,
            payments=payments,
            reseller_comments=reseller_comments,
            reseller_id=reseller_id,
            reseller_reference=reseller_reference,
            reseller_source=reseller_source,
            reseller_user=reseller_user,
            send_notifications=send_notifications,
            source=source,
            source_channel=source_channel,
            source_referrer=source_referrer,
            status=status,
            surcharge=surcharge,
            total_amount=total_amount,
            vouchers=vouchers,
        )
        booking_create.additional_properties = d
        return booking_create

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared extra properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties | /rezdy_api_for_suppliers_client-1.0.1.tar.gz/rezdy_api_for_suppliers_client-1.0.1/rezdy_api_for_suppliers_client/models/booking_create.py | 0.711832 | 0.41567 | booking_create.py | pypi |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.booking import Booking
from ..models.request_status import RequestStatus
from ..types import UNSET, Unset
T = TypeVar("T", bound="ResponseBooking")
@attr.s(auto_attribs=True)
class ResponseBooking:
    """API response wrapper pairing a request status with an optional booking payload.

    Attributes:
        request_status (RequestStatus): Status metadata returned with the response.
        booking (Union[Unset, Booking]): The booking carried by the response, when
            present. Absent (``UNSET``) when the API returned no booking object.
    """

    request_status: RequestStatus
    booking: Union[Unset, Booking] = UNSET
    # Holds any response keys not declared above; round-tripped by to_dict/from_dict.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, merging declared fields over additional ones."""
        serialized_status = self.request_status.to_dict()

        serialized_booking: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.booking, Unset):
            serialized_booking = self.booking.to_dict()

        result: Dict[str, Any] = dict(self.additional_properties)
        result["requestStatus"] = serialized_status
        if serialized_booking is not UNSET:
            result["booking"] = serialized_booking
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        data = src_dict.copy()

        request_status = RequestStatus.from_dict(data.pop("requestStatus"))

        raw_booking = data.pop("booking", UNSET)
        booking: Union[Unset, Booking]
        booking = UNSET if isinstance(raw_booking, Unset) else Booking.from_dict(raw_booking)

        instance = cls(
            request_status=request_status,
            booking=booking,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all additional (undeclared) properties held by this model."""
        return [prop_name for prop_name in self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.request_status import RequestStatus
from ..models.voucher import Voucher
from ..types import UNSET, Unset
T = TypeVar("T", bound="ResponseVoucherList")
@attr.s(auto_attribs=True)
class ResponseVoucherList:
    """API response wrapper pairing a request status with a list of vouchers.

    Attributes:
        request_status (RequestStatus): Status metadata returned with the response.
        vouchers (Union[Unset, List[Voucher]]): Vouchers carried by the response;
            absent (``UNSET``) when the API returned none.
    """

    request_status: RequestStatus
    vouchers: Union[Unset, List[Voucher]] = UNSET
    # Holds any response keys not declared above; round-tripped by to_dict/from_dict.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, merging declared fields over additional ones."""
        serialized_status = self.request_status.to_dict()

        serialized_vouchers: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.vouchers, Unset):
            serialized_vouchers = [item.to_dict() for item in self.vouchers]

        result: Dict[str, Any] = dict(self.additional_properties)
        result["requestStatus"] = serialized_status
        if serialized_vouchers is not UNSET:
            result["vouchers"] = serialized_vouchers
        return result

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict; leftover keys become additional properties."""
        data = src_dict.copy()

        request_status = RequestStatus.from_dict(data.pop("requestStatus"))

        # Missing/None "vouchers" deserializes to an empty list, matching the
        # generated-client convention for list fields.
        raw_vouchers = data.pop("vouchers", UNSET)
        vouchers = [Voucher.from_dict(item) for item in raw_vouchers or []]

        instance = cls(
            request_status=request_status,
            vouchers=vouchers,
        )
        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all additional (undeclared) properties held by this model."""
        return [prop_name for prop_name in self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.