repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
honeynet/droidbot | start.py | 2 | 9318 | # helper file of droidbot
# it parses command arguments and send the options to droidbot
import argparse
from droidbot import input_manager
from droidbot import input_policy
from droidbot import env_manager
from droidbot import DroidBot
from droidbot.droidmaster import DroidMaster
def parse_args():
    """Parse command-line input for DroidBot.

    Returns:
        argparse.Namespace: all DroidBot options (device serial, APK path,
        output directory, input policy, event count/interval, distributed
        mode settings, etc.), to be forwarded to DroidBot/DroidMaster.
    """
    parser = argparse.ArgumentParser(description="Start DroidBot to test an Android app.",
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-d", action="store", dest="device_serial", required=False,
                        help="The serial number of target device (use `adb devices` to find)")
    parser.add_argument("-a", action="store", dest="apk_path", required=True,
                        help="The file path to target APK")
    parser.add_argument("-o", action="store", dest="output_dir",
                        help="directory of output")
    # parser.add_argument("-env", action="store", dest="env_policy",
    #                     help="policy to set up environment. Supported policies:\n"
    #                          "none\tno environment will be set. App will run in default environment of device; \n"
    #                          "dummy\tadd some fake contacts, SMS log, call log; \n"
    #                          "static\tset environment based on static analysis result; \n"
    #                          "<file>\tget environment policy from a json file.\n")
    parser.add_argument("-policy", action="store", dest="input_policy", default=input_manager.DEFAULT_POLICY,
                        help='Policy to use for test input generation. '
                             'Default: %s.\nSupported policies:\n' % input_manager.DEFAULT_POLICY +
                             '  \"%s\" -- No event will be sent, user should interact manually with device; \n'
                             '  \"%s\" -- Use "adb shell monkey" to send events; \n'
                             '  \"%s\" -- Explore UI using a naive depth-first strategy;\n'
                             '  \"%s\" -- Explore UI using a greedy depth-first strategy;\n'
                             '  \"%s\" -- Explore UI using a naive breadth-first strategy;\n'
                             '  \"%s\" -- Explore UI using a greedy breadth-first strategy;\n'
                             %
                             (
                                 input_policy.POLICY_NONE,
                                 input_policy.POLICY_MONKEY,
                                 input_policy.POLICY_NAIVE_DFS,
                                 input_policy.POLICY_GREEDY_DFS,
                                 input_policy.POLICY_NAIVE_BFS,
                                 input_policy.POLICY_GREEDY_BFS,
                             ))

    # for distributed DroidBot
    parser.add_argument("-distributed", action="store", dest="distributed", choices=["master", "worker"],
                        help="Start DroidBot in distributed mode.")
    parser.add_argument("-master", action="store", dest="master",
                        help="DroidMaster's RPC address")
    parser.add_argument("-qemu_hda", action="store", dest="qemu_hda",
                        help="The QEMU's hda image")
    # Bug fix: help text previously misspelled the QEMU flag as "-nograpihc".
    parser.add_argument("-qemu_no_graphic", action="store_true", dest="qemu_no_graphic",
                        help="Run QEMU with -nographic parameter")

    parser.add_argument("-script", action="store", dest="script_path",
                        help="Use a script to customize input for certain states.")
    parser.add_argument("-count", action="store", dest="count", default=input_manager.DEFAULT_EVENT_COUNT, type=int,
                        help="Number of events to generate in total. Default: %d" % input_manager.DEFAULT_EVENT_COUNT)
    parser.add_argument("-interval", action="store", dest="interval", default=input_manager.DEFAULT_EVENT_INTERVAL,
                        type=int,
                        help="Interval in seconds between each two events. Default: %d" % input_manager.DEFAULT_EVENT_INTERVAL)
    parser.add_argument("-timeout", action="store", dest="timeout", default=input_manager.DEFAULT_TIMEOUT, type=int,
                        help="Timeout in seconds, -1 means unlimited. Default: %d" % input_manager.DEFAULT_TIMEOUT)
    parser.add_argument("-cv", action="store_true", dest="cv_mode",
                        help="Use OpenCV (instead of UIAutomator) to identify UI components. CV mode requires opencv-python installed.")
    parser.add_argument("-debug", action="store_true", dest="debug_mode",
                        help="Run in debug mode (dump debug messages).")
    parser.add_argument("-random", action="store_true", dest="random_input",
                        help="Add randomness to input events.")
    parser.add_argument("-keep_app", action="store_true", dest="keep_app",
                        help="Keep the app on the device after testing.")
    parser.add_argument("-keep_env", action="store_true", dest="keep_env",
                        help="Keep the test environment (eg. minicap and accessibility service) after testing.")
    parser.add_argument("-use_method_profiling", action="store", dest="profiling_method",
                        help="Record method trace for each event. can be \"full\" or a sampling rate.")
    parser.add_argument("-grant_perm", action="store_true", dest="grant_perm",
                        help="Grant all permissions while installing. Useful for Android 6.0+.")
    parser.add_argument("-is_emulator", action="store_true", dest="is_emulator",
                        help="Declare the target device to be an emulator, which would be treated specially by DroidBot.")
    parser.add_argument("-accessibility_auto", action="store_true", dest="enable_accessibility_hard",
                        help="Enable the accessibility service automatically even though it might require device restart\n(can be useful for Android API level < 23).")
    parser.add_argument("-humanoid", action="store", dest="humanoid",
                        help="Connect to a Humanoid service (addr:port) for more human-like behaviors.")
    parser.add_argument("-ignore_ad", action="store_true", dest="ignore_ad",
                        help="Ignore Ad views by checking resource_id.")
    parser.add_argument("-replay_output", action="store", dest="replay_output",
                        help="The droidbot output directory being replayed.")
    options = parser.parse_args()
    # print options
    return options
def main():
    """Entry point: parse command-line options, then start DroidBot.

    Depending on the `-distributed` option, this starts a DroidMaster
    (distributed master), or a DroidBot instance (worker or normal mode).
    """
    opts = parse_args()
    import os
    if not os.path.exists(opts.apk_path):
        print("APK does not exist.")
        return
    if not opts.output_dir and opts.cv_mode:
        print("To run in CV mode, you need to specify an output dir (using -o option).")
        # Bug fix: previously execution fell through here and DroidBot started
        # anyway without an output dir, despite the error message above.
        return
    if opts.distributed:
        # argparse restricts `-distributed` choices to {"master", "worker"}.
        start_mode = opts.distributed
    else:
        start_mode = "normal"

    if start_mode == "master":
        droidmaster = DroidMaster(
            app_path=opts.apk_path,
            is_emulator=opts.is_emulator,
            output_dir=opts.output_dir,
            # env_policy=opts.env_policy,
            env_policy=env_manager.POLICY_NONE,
            policy_name=opts.input_policy,
            random_input=opts.random_input,
            script_path=opts.script_path,
            event_interval=opts.interval,
            timeout=opts.timeout,
            event_count=opts.count,
            cv_mode=opts.cv_mode,
            debug_mode=opts.debug_mode,
            keep_app=opts.keep_app,
            keep_env=opts.keep_env,
            profiling_method=opts.profiling_method,
            grant_perm=opts.grant_perm,
            enable_accessibility_hard=opts.enable_accessibility_hard,
            qemu_hda=opts.qemu_hda,
            qemu_no_graphic=opts.qemu_no_graphic,
            humanoid=opts.humanoid,
            ignore_ad=opts.ignore_ad,
            replay_output=opts.replay_output)
        droidmaster.start()
    else:
        droidbot = DroidBot(
            app_path=opts.apk_path,
            device_serial=opts.device_serial,
            is_emulator=opts.is_emulator,
            output_dir=opts.output_dir,
            # env_policy=opts.env_policy,
            env_policy=env_manager.POLICY_NONE,
            policy_name=opts.input_policy,
            random_input=opts.random_input,
            script_path=opts.script_path,
            event_interval=opts.interval,
            timeout=opts.timeout,
            event_count=opts.count,
            cv_mode=opts.cv_mode,
            debug_mode=opts.debug_mode,
            keep_app=opts.keep_app,
            keep_env=opts.keep_env,
            profiling_method=opts.profiling_method,
            grant_perm=opts.grant_perm,
            enable_accessibility_hard=opts.enable_accessibility_hard,
            master=opts.master,
            humanoid=opts.humanoid,
            ignore_ad=opts.ignore_ad,
            replay_output=opts.replay_output)
        droidbot.start()
    return
# Script entry point.
if __name__ == "__main__":
    main()
| mit | 2db5eb9be0c1272e14bc704479ccae7d | 52.551724 | 167 | 0.580704 | 4.202977 | false | false | false | false |
humancompatibleai/imitation | src/imitation/util/video_wrapper.py | 1 | 2617 | """Wrapper to record rendered video frames from an environment."""
import os
import gym
from gym.wrappers.monitoring import video_recorder
from imitation.data import types
class VideoWrapper(gym.Wrapper):
    """Creates videos from wrapped environment by calling render after each timestep."""

    def __init__(
        self,
        env: gym.Env,
        directory: types.AnyPath,
        single_video: bool = True,
    ):
        """Builds a VideoWrapper.

        Args:
            env: the wrapped environment.
            directory: the output directory.
            single_video: if True, generates a single video file, with episodes
                concatenated. If False, a new video file is created for each episode.
                Usually a single video file is what is desired. However, if one is
                searching for an interesting episode (perhaps by looking at the
                metadata), then saving to different files can be useful.
        """
        super().__init__(env)
        self.episode_id = 0  # episode counter; used to name per-episode videos
        self.video_recorder = None  # created lazily by `_reset_video_recorder`
        self.single_video = single_video

        self.directory = os.path.abspath(directory)
        # No `exist_ok` here: fails fast if the directory already exists, so
        # videos from a previous run are never silently mixed with new ones.
        os.makedirs(self.directory)

    def _reset_video_recorder(self) -> None:
        """Creates a video recorder if one does not already exist.

        Called at the start of each episode (by `reset`). When a video recorder is
        already present, it will only create a new one if `self.single_video == False`.
        """
        if self.video_recorder is not None:
            # Video recorder already started.
            if not self.single_video:
                # We want a new video for each episode, so destroy current recorder.
                self.video_recorder.close()
                self.video_recorder = None

        if self.video_recorder is None:
            # No video recorder -- start a new one.
            self.video_recorder = video_recorder.VideoRecorder(
                env=self.env,
                base_path=os.path.join(
                    self.directory, "video.{:06}".format(self.episode_id)
                ),
                metadata={"episode_id": self.episode_id},
            )

    def reset(self):
        """Resets the wrapped env, (re)starting the video recorder as needed."""
        self._reset_video_recorder()
        self.episode_id += 1
        return self.env.reset()

    def step(self, action):
        """Steps the wrapped env, then captures a rendered frame of the result."""
        res = self.env.step(action)
        self.video_recorder.capture_frame()
        return res

    def close(self) -> None:
        """Flushes and closes any active video recorder, then the wrapped env."""
        if self.video_recorder is not None:
            self.video_recorder.close()
            self.video_recorder = None
        super().close()
| mit | 5801ee530b70668c64ff9ecd34eac6f2 | 33.434211 | 88 | 0.591517 | 4.458262 | false | false | false | false |
humancompatibleai/imitation | src/imitation/algorithms/bc.py | 1 | 15636 | """Behavioural Cloning (BC).
Trains policy by applying supervised learning to a fixed dataset of (observation,
action) pairs generated by some expert demonstrator.
"""
import contextlib
from typing import Any, Callable, Dict, Iterable, Mapping, Optional, Tuple, Type, Union
import gym
import numpy as np
import torch as th
import torch.utils.data as th_data
import tqdm.autonotebook as tqdm
from stable_baselines3.common import logger, policies, utils, vec_env
from imitation.data import rollout, types
from imitation.policies import base
def reconstruct_policy(
    policy_path: str,
    device: Union[th.device, str] = "auto",
) -> policies.BasePolicy:
    """Reconstruct a saved policy.

    Args:
        policy_path: path where `.save_policy()` has been run.
        device: device on which to load the policy.

    Returns:
        policy: policy with reloaded weights.
    """
    target_device = utils.get_device(device)
    loaded = th.load(policy_path, map_location=target_device)
    assert isinstance(loaded, policies.BasePolicy)
    return loaded
class ConstantLRSchedule:
    """A callable that returns a constant learning rate.

    Matches the `lr_schedule(progress)` callable signature while ignoring
    the progress argument entirely.
    """

    def __init__(self, lr: float = 1e-3):
        """
        Args:
            lr: the constant learning rate that calls to this object will return.
        """
        self.lr = lr

    def __call__(self, _progress):
        """Return the constant learning rate, regardless of `_progress`."""
        return self.lr
class _NoopTqdm:
"""Dummy replacement for tqdm.tqdm() when we don't want a progress bar visible."""
def close(self):
pass
def set_description(self, s):
pass
def update(self, n):
pass
class EpochOrBatchIteratorWithProgress:
    """Iterates over expert data batches for a fixed number of epochs OR batches.

    Wraps a DataLoader-like iterable so one `for` loop suffices for BC
    training, while a tqdm bar reports progress in stdout.
    """

    def __init__(
        self,
        data_loader: Iterable[dict],
        n_epochs: Optional[int] = None,
        n_batches: Optional[int] = None,
        on_epoch_end: Optional[Callable[[], None]] = None,
        on_batch_end: Optional[Callable[[], None]] = None,
        progress_bar_visible: bool = True,
    ):
        """Wraps DataLoader so that all BC batches can be processed in a one for-loop.

        Also uses `tqdm` to show progress in stdout.

        Args:
            data_loader: An iterable over data dicts, as used in `BC`.
            n_epochs: The number of epochs to iterate through in one call to
                __iter__. Exactly one of `n_epochs` and `n_batches` should be provided.
            n_batches: The number of batches to iterate through in one call to
                __iter__. Exactly one of `n_epochs` and `n_batches` should be provided.
            on_epoch_end: A callback function without parameters to be called at the
                end of every epoch.
            on_batch_end: A callback function without parameters to be called at the
                end of every batch.
            progress_bar_visible: If True, then show a tqdm progress bar.
        """
        # Exactly one of the two termination criteria must be set; `use_epochs`
        # records which one governs `__iter__`.
        if n_epochs is not None and n_batches is None:
            self.use_epochs = True
        elif n_epochs is None and n_batches is not None:
            self.use_epochs = False
        else:
            raise ValueError(
                "Must provide exactly one of `n_epochs` and `n_batches` arguments."
            )

        self.data_loader = data_loader
        self.n_epochs = n_epochs
        self.n_batches = n_batches
        self.on_epoch_end = on_epoch_end
        self.on_batch_end = on_batch_end
        self.progress_bar_visible = progress_bar_visible

    def __iter__(self) -> Iterable[Tuple[dict, dict]]:
        """Yields batches while updating tqdm display to display progress."""
        samples_so_far = 0
        epoch_num = 0
        batch_num = 0
        batch_suffix = epoch_suffix = ""
        if self.progress_bar_visible:
            # The bar total is denominated in whichever unit governs termination.
            if self.use_epochs:
                display = tqdm.tqdm(total=self.n_epochs)
                epoch_suffix = f"/{self.n_epochs}"
            else:  # Use batches.
                display = tqdm.tqdm(total=self.n_batches)
                batch_suffix = f"/{self.n_batches}"
        else:
            display = _NoopTqdm()

        def update_desc():
            display.set_description(
                f"batch: {batch_num}{batch_suffix} epoch: {epoch_num}{epoch_suffix}"
            )

        # `contextlib.closing` guarantees `display.close()` even if the consumer
        # abandons this generator early.
        with contextlib.closing(display):
            while True:
                update_desc()
                got_data_on_epoch = False
                for batch in self.data_loader:
                    got_data_on_epoch = True
                    batch_num += 1
                    batch_size = len(batch["obs"])
                    assert batch_size > 0
                    samples_so_far += batch_size
                    stats = dict(
                        epoch_num=epoch_num,
                        batch_num=batch_num,
                        samples_so_far=samples_so_far,
                    )
                    yield batch, stats
                    if self.on_batch_end is not None:
                        self.on_batch_end()
                    if not self.use_epochs:
                        update_desc()
                        display.update(1)
                        if batch_num >= self.n_batches:
                            return
                if not got_data_on_epoch:
                    # Guards against data loaders that are exhausted after one
                    # pass instead of resetting, which would loop forever.
                    raise AssertionError(
                        f"Data loader returned no data after "
                        f"{batch_num} batches, during epoch "
                        f"{epoch_num} -- did it reset correctly?"
                    )
                epoch_num += 1
                if self.on_epoch_end is not None:
                    self.on_epoch_end()

                if self.use_epochs:
                    update_desc()
                    display.update(1)
                    if epoch_num >= self.n_epochs:
                        return
class BC:
    """Behavioral cloning (BC).

    Recovers a policy via supervised learning on observation-action pairs
    sampled from a Torch DataLoader or any iterable of batch dicts.
    """

    DEFAULT_BATCH_SIZE: int = 32
    """Default batch size for DataLoader automatically constructed from Transitions.

    See `set_expert_data_loader()`.
    """

    # TODO(scottemmons): pass BasePolicy into BC directly (rather than passing its
    # arguments)
    def __init__(
        self,
        observation_space: gym.Space,
        action_space: gym.Space,
        *,
        policy_class: Type[policies.BasePolicy] = base.FeedForward32Policy,
        policy_kwargs: Optional[Mapping[str, Any]] = None,
        expert_data: Union[Iterable[Mapping], types.TransitionsMinimal, None] = None,
        optimizer_cls: Type[th.optim.Optimizer] = th.optim.Adam,
        optimizer_kwargs: Optional[Dict[str, Any]] = None,
        ent_weight: float = 1e-3,
        l2_weight: float = 0.0,
        device: Union[str, th.device] = "auto",
    ):
        """Behavioral cloning (BC).

        Recovers a policy via supervised learning on observation-action Tensor
        pairs, sampled from a Torch DataLoader or any Iterator that ducktypes
        `torch.utils.data.DataLoader`.

        Args:
            observation_space: the observation space of the environment.
            action_space: the action space of the environment.
            policy_class: used to instantiate imitation policy.
            policy_kwargs: keyword arguments passed to policy's constructor.
            expert_data: If not None, then immediately call
                `self.set_expert_data_loader(expert_data)` during initialization.
            optimizer_cls: optimiser to use for supervised training.
            optimizer_kwargs: keyword arguments, excluding learning rate and
                weight decay, for optimiser construction.
            ent_weight: scaling applied to the policy's entropy regularization.
            l2_weight: scaling applied to the policy's L2 regularization.
            device: name/identity of device to place policy on.

        Raises:
            ValueError: if `weight_decay` is passed in `optimizer_kwargs`;
                use the `l2_weight` parameter instead.
        """
        if optimizer_kwargs:
            if "weight_decay" in optimizer_kwargs:
                raise ValueError("Use the parameter l2_weight instead of weight_decay.")
        self.action_space = action_space
        self.observation_space = observation_space
        self.policy_class = policy_class
        # `utils.get_device` resolves "auto" to a concrete torch device.
        # (A redundant second `utils.get_device` call was removed here.)
        self.device = device = utils.get_device(device)
        self.policy_kwargs = dict(
            observation_space=self.observation_space,
            action_space=self.action_space,
            lr_schedule=ConstantLRSchedule(),
        )
        self.policy_kwargs.update(policy_kwargs or {})

        self.policy = self.policy_class(**self.policy_kwargs).to(
            self.device
        )  # pytype: disable=not-instantiable
        optimizer_kwargs = optimizer_kwargs or {}
        self.optimizer = optimizer_cls(self.policy.parameters(), **optimizer_kwargs)

        self.expert_data_loader: Optional[Iterable[Mapping]] = None
        self.ent_weight = ent_weight
        self.l2_weight = l2_weight
        if expert_data is not None:
            self.set_expert_data_loader(expert_data)

    def set_expert_data_loader(
        self,
        expert_data: Union[Iterable[Mapping], types.TransitionsMinimal],
    ) -> None:
        """Set the expert data loader, which yields batches of obs-act pairs.

        Changing the expert data loader on-demand is useful for DAgger and other
        interactive algorithms.

        Args:
            expert_data: Either a Torch `DataLoader`, any other iterator that
                yields dictionaries containing "obs" and "acts" Tensors or Numpy arrays,
                or a `TransitionsMinimal` instance.

                If this is a `TransitionsMinimal` instance, then it is automatically
                converted into a shuffled `DataLoader` with batch size
                `BC.DEFAULT_BATCH_SIZE`.
        """
        if isinstance(expert_data, types.TransitionsMinimal):
            self.expert_data_loader = th_data.DataLoader(
                expert_data,
                shuffle=True,
                batch_size=BC.DEFAULT_BATCH_SIZE,
                collate_fn=types.transitions_collate_fn,
            )
        else:
            self.expert_data_loader = expert_data

    def _calculate_loss(
        self,
        obs: Union[th.Tensor, np.ndarray],
        acts: Union[th.Tensor, np.ndarray],
    ) -> Tuple[th.Tensor, Dict[str, float]]:
        """
        Calculate the supervised learning loss used to train the behavioral clone.

        Args:
            obs: The observations seen by the expert. If this is a Tensor, then
                gradients are detached first before loss is calculated.
            acts: The actions taken by the expert. If this is a Tensor, then its
                gradients are detached first before loss is calculated.

        Returns:
            loss: The supervised learning loss for the behavioral clone to optimize.
            stats_dict: Statistics about the learning process to be logged.
        """
        obs = th.as_tensor(obs, device=self.device).detach()
        acts = th.as_tensor(acts, device=self.device).detach()

        _, log_prob, entropy = self.policy.evaluate_actions(obs, acts)
        prob_true_act = th.exp(log_prob).mean()
        log_prob = log_prob.mean()
        entropy = entropy.mean()
        l2_norms = [th.sum(th.square(w)) for w in self.policy.parameters()]
        l2_norm = sum(l2_norms) / 2  # divide by 2 to cancel with gradient of square

        ent_loss = -self.ent_weight * entropy
        neglogp = -log_prob
        l2_loss = self.l2_weight * l2_norm
        loss = neglogp + ent_loss + l2_loss

        stats_dict = dict(
            neglogp=neglogp.item(),
            loss=loss.item(),
            entropy=entropy.item(),
            ent_loss=ent_loss.item(),
            prob_true_act=prob_true_act.item(),
            l2_norm=l2_norm.item(),
            l2_loss=l2_loss.item(),
        )

        return loss, stats_dict

    def train(
        self,
        *,
        n_epochs: Optional[int] = None,
        n_batches: Optional[int] = None,
        on_epoch_end: Optional[Callable[[], None]] = None,
        on_batch_end: Optional[Callable[[], None]] = None,
        log_interval: int = 100,
        log_rollouts_venv: Optional[vec_env.VecEnv] = None,
        log_rollouts_n_episodes: int = 5,
        progress_bar: bool = True,
    ):
        """Train with supervised learning for some number of epochs.

        Here an 'epoch' is just a complete pass through the expert data loader,
        as set by `self.set_expert_data_loader()`.

        Args:
            n_epochs: Number of complete passes made through expert data before ending
                training. Provide exactly one of `n_epochs` and `n_batches`.
            n_batches: Number of batches loaded from dataset before ending training.
                Provide exactly one of `n_epochs` and `n_batches`.
            on_epoch_end: Optional callback with no parameters to run at the end of each
                epoch.
            on_batch_end: Optional callback with no parameters to run at the end of each
                batch.
            log_interval: Log stats after every log_interval batches.
            log_rollouts_venv: If not None, then this VecEnv (whose observation and
                actions spaces must match `self.observation_space` and
                `self.action_space`) is used to generate rollout stats, including
                average return and average episode length. If None, then no rollouts
                are generated.
            log_rollouts_n_episodes: Number of rollouts to generate when calculating
                rollout stats. Non-positive number disables rollouts.
            progress_bar: If True, then show a progress bar during training.
        """
        it = EpochOrBatchIteratorWithProgress(
            self.expert_data_loader,
            n_epochs=n_epochs,
            n_batches=n_batches,
            on_epoch_end=on_epoch_end,
            on_batch_end=on_batch_end,
            progress_bar_visible=progress_bar,
        )

        batch_num = 0
        for batch, stats_dict_it in it:
            loss, stats_dict_loss = self._calculate_loss(batch["obs"], batch["acts"])

            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()

            if batch_num % log_interval == 0:
                for stats in [stats_dict_it, stats_dict_loss]:
                    for k, v in stats.items():
                        logger.record(k, v)
                # TODO(shwang): Maybe instead use a callback that can be shared between
                #   all algorithms' `.train()` for generating rollout stats.
                #   EvalCallback could be a good fit:
                #   https://stable-baselines3.readthedocs.io/en/master/guide/callbacks.html#evalcallback
                if log_rollouts_venv is not None and log_rollouts_n_episodes > 0:
                    trajs = rollout.generate_trajectories(
                        self.policy,
                        log_rollouts_venv,
                        rollout.min_episodes(log_rollouts_n_episodes),
                    )
                    stats = rollout.rollout_stats(trajs)
                    logger.record("batch_size", len(batch["obs"]))
                    for k, v in stats.items():
                        if "return" in k and "monitor" not in k:
                            logger.record("rollout/" + k, v)
                logger.dump(batch_num)
            batch_num += 1

    def save_policy(self, policy_path: types.AnyPath) -> None:
        """Save policy to a path. Can be reloaded by `.reconstruct_policy()`.

        Args:
            policy_path: path to save policy to.
        """
        th.save(self.policy, policy_path)
| mit | ca1fd1671cae20fb706222bf7304b7b5 | 37.895522 | 104 | 0.57406 | 4.191957 | false | false | false | false |
humancompatibleai/imitation | src/imitation/envs/examples/airl_envs/__init__.py | 1 | 1244 | from typing import Optional
from gym.envs import register as gym_register
_ENTRY_POINT_PREFIX = "imitation.envs.examples.airl_envs"
def _register(env_name: str, entry_point: str, kwargs: Optional[dict] = None):
    """Register a Gym env whose entry point lives inside this package."""
    full_entry_point = ".".join([_ENTRY_POINT_PREFIX, entry_point])
    gym_register(id=env_name, entry_point=full_entry_point, kwargs=kwargs)
def _point_maze_register():
    """Register the four PointMaze variants: {Left,Right} x {plain,Vel}."""
    directions = {"Left": 0, "Right": 1}
    velocity_options = {"": False, "Vel": True}
    for direction_name, direction_value in directions.items():
        for vel_name, include_vel in velocity_options.items():
            env_id = f"imitation/PointMaze{direction_name}{vel_name}-v0"
            _register(
                env_id,
                entry_point="point_maze_env:PointMazeEnv",
                kwargs={"direction": direction_value, "include_vel": include_vel},
            )
# Pusher variant with a dense (non-sparse) reward.
_register(
    "imitation/ObjPusher-v0",
    entry_point="pusher_env:PusherEnv",
    kwargs={"sparse_reward": False},
)
_register("imitation/TwoDMaze-v0", entry_point="twod_maze:TwoDMaze")
_point_maze_register()

# A modified ant which flips over less and learns faster via TRPO
_register(
    "imitation/CustomAnt-v0",
    entry_point="ant_env:CustomAntEnv",
    kwargs={"gear": 30, "disabled": False},
)
_register(
    "imitation/DisabledAnt-v0",
    entry_point="ant_env:CustomAntEnv",
    kwargs={"gear": 30, "disabled": True},
)
| mit | bdcd8554d52566700e029695a60b379f | 28.619048 | 78 | 0.645498 | 3.079208 | false | false | false | false |
humancompatibleai/imitation | src/imitation/util/networks.py | 1 | 2430 | """Helper methods to build and run neural networks."""
import collections
from typing import Iterable, Optional, Type
from torch import nn
class SqueezeLayer(nn.Module):
    """Torch module that squeezes a B*1 tensor down into a size-B vector."""

    def forward(self, x):
        # Only inputs of shape (B, 1) are meaningful for this layer.
        assert x.ndim == 2 and x.shape[1] == 1
        squeezed = x.squeeze(1)
        assert squeezed.ndim == 1
        return squeezed
def build_mlp(
    in_size: int,
    hid_sizes: Iterable[int],
    out_size: int = 1,
    name: Optional[str] = None,
    activation: Type[nn.Module] = nn.ReLU,
    squeeze_output: bool = False,
    flatten_input: bool = False,
) -> nn.Module:
    """Constructs a Torch MLP.

    Args:
        in_size: size of individual input vectors; input to the MLP will be of
            shape (batch_size, in_size).
        hid_sizes: sizes of hidden layers. If this is an empty iterable, then we build
            a linear function approximator.
        out_size: required size of output vector.
        name: optional prefix prepended to every layer name; useful for telling
            apart several MLPs inside one `state_dict()`.
        activation: activation to apply after hidden layers.
        squeeze_output: if out_size=1, then squeeze_output=True ensures that MLP
            output is of size (B,) instead of (B,1).
        flatten_input: should input be flattened along axes 1, 2, 3, ...? Useful
            if you want to, e.g., process small images inputs with an MLP.

    Returns:
        nn.Module: an MLP mapping from inputs of size (batch_size, in_size) to
            (batch_size, out_size), unless out_size=1 and squeeze_output=True,
            in which case the output is of size (batch_size, ).

    Raises:
        ValueError: if squeeze_output was supplied with out_size!=1.
    """
    layers = collections.OrderedDict()
    prefix = "" if name is None else f"{name}_"

    if flatten_input:
        layers[f"{prefix}flatten"] = nn.Flatten()

    # Hidden layers
    prev_size = in_size
    for i, size in enumerate(hid_sizes):
        layers[f"{prefix}dense{i}"] = nn.Linear(prev_size, size)
        prev_size = size
        if activation:
            layers[f"{prefix}act{i}"] = activation()

    # Final layer
    layers[f"{prefix}dense_final"] = nn.Linear(prev_size, out_size)

    if squeeze_output:
        if out_size != 1:
            raise ValueError("squeeze_output is only applicable when out_size=1")
        layers[f"{prefix}squeeze"] = SqueezeLayer()

    model = nn.Sequential(layers)
    return model
| mit | 82c99bdaf15004ee39f00f4dce85923d | 30.532468 | 86 | 0.621911 | 3.712538 | false | false | false | false |
humancompatibleai/imitation | src/imitation/scripts/config/expert_demos.py | 1 | 4739 | import os
import sacred
from imitation.scripts.config.common import DEFAULT_INIT_RL_KWARGS
from imitation.util import util
expert_demos_ex = sacred.Experiment("expert_demos")


@expert_demos_ex.config
def expert_demos_defaults():
    # NOTE: Sacred config scopes capture every local assignment below as a
    # config value; the inline comments become the entries' help text.
    env_name = "CartPole-v1"  # The gym.Env name
    total_timesteps = int(1e6)  # Number of training timesteps in model.learn()
    num_vec = 8  # Number of environments in VecEnv
    parallel = True  # Use SubprocVecEnv (generally faster if num_vec>1)
    normalize = True  # Use VecNormalize
    normalize_kwargs = dict()  # kwargs for `VecNormalize`
    max_episode_steps = None  # Set to positive int to limit episode horizons
    n_episodes_eval = 50  # Num of episodes for final ep reward mean evaluation
    init_rl_kwargs = dict(DEFAULT_INIT_RL_KWARGS)

    # If specified, overrides the ground-truth environment reward
    reward_type = None  # override reward type
    reward_path = None  # override reward path

    rollout_save_final = True  # If True, save after training is finished.
    rollout_save_n_timesteps = None  # Min timesteps saved per file, optional.
    rollout_save_n_episodes = None  # Num episodes saved per file, optional.

    policy_save_interval = 10000  # Num timesteps between saves (<=0 disables)
    policy_save_final = True  # If True, save after training is finished.

    init_tensorboard = False  # If True, then write Tensorboard logs.
    log_root = os.path.join("output", "expert_demos")  # output directory
@expert_demos_ex.config
def default_end_cond(rollout_save_n_timesteps, rollout_save_n_episodes):
    # Only set default if both end cond options are None.
    # This way the Sacred CLI caller can set `rollout_save_n_episodes` only
    # without getting an error that `rollout_save_n_timesteps is not None`.
    if rollout_save_n_timesteps is None and rollout_save_n_episodes is None:
        rollout_save_n_timesteps = 2000  # Min timesteps saved per file, optional.
@expert_demos_ex.config
def logging(env_name, log_root):
    # Unique output dir per run: <log_root>/<sanitized env name>/<timestamp>.
    log_dir = os.path.join(
        log_root, env_name.replace("/", "_"), util.make_unique_timestamp()
    )
@expert_demos_ex.config
def rollouts_from_policy_only_defaults(log_dir):
    policy_path = None  # Policy path for rollouts_from_policy command only
    policy_type = "ppo"  # Policy type for rollouts_from_policy command only
    rollout_save_path = os.path.join(
        log_dir, "rollout.pkl"
    )  # Save path for `rollouts_from_policy` only.
# Standard Gym env configs.
# Each named config below overrides a subset of `expert_demos_defaults` and is
# selected from the Sacred CLI by name (e.g. `with cartpole`).
@expert_demos_ex.named_config
def acrobot():
    env_name = "Acrobot-v1"


@expert_demos_ex.named_config
def ant():
    env_name = "Ant-v2"
    # Merges the shared Ant settings (defined at the bottom of this module;
    # available because config scopes run after import) into this scope.
    locals().update(**ant_shared_locals)


@expert_demos_ex.named_config
def cartpole():
    env_name = "CartPole-v1"
    total_timesteps = int(1e5)


@expert_demos_ex.named_config
def seals_cartpole():
    env_name = "seals/CartPole-v0"
    total_timesteps = int(1e6)


@expert_demos_ex.named_config
def half_cheetah():
    env_name = "HalfCheetah-v2"
    total_timesteps = int(5e6)  # does OK after 1e6, but continues improving


@expert_demos_ex.named_config
def hopper():
    # TODO(adam): upgrade to Hopper-v3?
    env_name = "Hopper-v2"


@expert_demos_ex.named_config
def humanoid():
    env_name = "Humanoid-v2"
    init_rl_kwargs = dict(
        n_steps=2048,
    )  # batch size of 2048*8=16384 due to num_vec
    total_timesteps = int(10e6)  # fairly discontinuous, needs at least 5e6


@expert_demos_ex.named_config
def mountain_car():
    env_name = "MountainCar-v0"


@expert_demos_ex.named_config
def seals_mountain_car():
    env_name = "seals/MountainCar-v0"


@expert_demos_ex.named_config
def pendulum():
    env_name = "Pendulum-v0"


@expert_demos_ex.named_config
def reacher():
    env_name = "Reacher-v2"


@expert_demos_ex.named_config
def swimmer():
    env_name = "Swimmer-v2"


@expert_demos_ex.named_config
def walker():
    env_name = "Walker2d-v2"
# Custom env configs
@expert_demos_ex.named_config
def custom_ant():
    env_name = "imitation/CustomAnt-v0"
    locals().update(**ant_shared_locals)


@expert_demos_ex.named_config
def disabled_ant():
    env_name = "imitation/DisabledAnt-v0"
    locals().update(**ant_shared_locals)


@expert_demos_ex.named_config
def two_d_maze():
    env_name = "imitation/TwoDMaze-v0"


# Debug configs
@expert_demos_ex.named_config
def fast():
    """Intended for testing purposes: small # of updates, ends quickly."""
    total_timesteps = int(1)
    max_episode_steps = int(1)


# Shared settings
# Common overrides merged into the Ant-based configs via `locals().update`.
ant_shared_locals = dict(
    init_rl_kwargs=dict(
        n_steps=2048,
    ),  # batch size of 2048*8=16384 due to num_vec
    total_timesteps=int(5e6),
    max_episode_steps=500,  # To match `inverse_rl` settings.
)
| mit | 69284d1c3b3c4d4a11ab24a7a058d6c2 | 25.327778 | 82 | 0.69023 | 2.954489 | false | true | false | false |
humancompatibleai/imitation | src/imitation/rewards/discrim_nets.py | 1 | 12334 | import abc
import logging
from typing import Optional
import gym
import numpy as np
import torch as th
import torch.nn.functional as F
from stable_baselines3.common import preprocessing
from torch import nn
from imitation.rewards import common as rewards_common
from imitation.rewards import reward_nets
from imitation.util import networks
class DiscrimNet(nn.Module, abc.ABC):
    """Abstract base class for discriminator, used in AIRL and GAIL."""

    def __init__(
        self,
        observation_space: gym.Space,
        action_space: gym.Space,
        normalize_images: bool = False,
    ):
        """Builds the discriminator base.

        Args:
            observation_space: observation space of the environment.
            action_space: action space of the environment.
            normalize_images: whether image observations should be normalized
                to [0, 1] during preprocessing in `_eval_reward`.
        """
        super().__init__()
        self.observation_space = observation_space
        self.action_space = action_space
        self.normalize_images = normalize_images

    @abc.abstractmethod
    def logits_gen_is_high(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
        log_policy_act_prob: Optional[th.Tensor] = None,
    ) -> th.Tensor:
        """Compute the discriminator's logits for each state-action sample.

        A high value corresponds to predicting generator, and a low value corresponds to
        predicting expert.

        Args:
            state: state at time t.
            action: action taken at time t.
            next_state: state at time t+1.
            done: binary episode completion flag after action at time t.
            log_policy_act_prob: log policy of novice taking `action`. This is
                only used for AIRL.

        Returns:
            disc_logits_gen_is_high: discriminator logits for a sigmoid
                activation. A high output indicates a generator-like transition.
        """

    def disc_loss(
        self,
        disc_logits_gen_is_high: th.Tensor,
        labels_gen_is_one: th.Tensor,
    ) -> th.Tensor:
        """Compute discriminator loss.

        Args:
            disc_logits_gen_is_high: discriminator logits, as produced by
                `logits_gen_is_high`.
            labels_gen_is_one: integer labels, with zero for expert and one for
                generator (novice).

        Returns:
            loss: scalar-valued discriminator loss."""
        # Standard binary cross-entropy on the "is generator" classification.
        return F.binary_cross_entropy_with_logits(
            disc_logits_gen_is_high, labels_gen_is_one.float()
        )

    def device(self) -> th.device:
        """Heuristic to determine which device this module is on."""
        # Assumes all parameters live on the same device; the first suffices.
        first_param = next(self.parameters())
        return first_param.device

    @abc.abstractmethod
    def reward_test(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
    ) -> th.Tensor:
        """Test-time reward for given states/actions."""

    @abc.abstractmethod
    def reward_train(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
    ) -> th.Tensor:
        """Train-time reward for given states/actions."""

    def predict_reward_train(
        self,
        state: np.ndarray,
        action: np.ndarray,
        next_state: np.ndarray,
        done: np.ndarray,
    ) -> np.ndarray:
        """Vectorized reward for training an imitation learning algorithm.

        Args:
            state: The observation input. Its shape is
                `(batch_size,) + observation_space.shape`.
            action: The action input. Its shape is
                `(batch_size,) + action_space.shape`. The None dimension is
                expected to be the same as None dimension from `obs_input`.
            next_state: The observation input. Its shape is
                `(batch_size,) + observation_space.shape`.
            done: Whether the episode has terminated. Its shape is `(batch_size,)`.

        Returns:
            The rewards. Its shape is `(batch_size,)`.
        """
        return self._eval_reward(
            is_train=True, state=state, action=action, next_state=next_state, done=done
        )

    def predict_reward_test(
        self,
        state: np.ndarray,
        action: np.ndarray,
        next_state: np.ndarray,
        done: np.ndarray,
    ) -> np.ndarray:
        """Vectorized reward for training an expert during transfer learning.

        Args:
            state: The observation input. Its shape is
                `(batch_size,) + observation_space.shape`.
            action: The action input. Its shape is
                `(batch_size,) + action_space.shape`. The None dimension is
                expected to be the same as None dimension from `obs_input`.
            next_state: The observation input. Its shape is
                `(batch_size,) + observation_space.shape`.
            done: Whether the episode has terminated. Its shape is `(batch_size,)`.

        Returns:
            The rewards. Its shape is `(batch_size,)`.
        """
        return self._eval_reward(
            is_train=False, state=state, action=action, next_state=next_state, done=done
        )

    def _eval_reward(
        self,
        is_train: bool,
        state: np.ndarray,
        action: np.ndarray,
        next_state: np.ndarray,
        done: np.ndarray,
    ) -> np.ndarray:
        # Shared implementation of predict_reward_{train,test}: convert the
        # NumPy inputs to tensors on this module's device, evaluate the reward
        # without tracking gradients, and return a flat NumPy array.
        (
            state_th,
            action_th,
            next_state_th,
            done_th,
        ) = rewards_common.disc_rew_preprocess_inputs(
            observation_space=self.observation_space,
            action_space=self.action_space,
            state=state,
            action=action,
            next_state=next_state,
            done=done,
            device=self.device(),
            normalize_images=self.normalize_images,
        )
        with th.no_grad():
            if is_train:
                rew_th = self.reward_train(state_th, action_th, next_state_th, done_th)
            else:
                rew_th = self.reward_test(state_th, action_th, next_state_th, done_th)
        rew = rew_th.detach().cpu().numpy().flatten()
        assert rew.shape == (len(state),)
        return rew
class DiscrimNetAIRL(DiscrimNet):
    r"""The AIRL discriminator for a given RewardNet.

    The AIRL discriminator is of the form

    .. math:: D_{\theta}(s,a) = \frac{\exp(f_{\theta}(s,a)}{\exp(f_{\theta}(s,a) + \pi(a \mid s)}

    where :math:`f_{\theta}` is `self.reward_net`.
    """ # noqa: E501

    def __init__(self, reward_net: reward_nets.RewardNet, entropy_weight: float = 1.0):
        r"""Builds a DiscrimNetAIRL.

        Args:
            reward_net: A RewardNet, used as $f_{\theta}$ in the discriminator.
            entropy_weight: The coefficient for the entropy regularization term.
                To match the AIRL derivation, it should be 1.0.
                However, empirically a lower value sometimes work better.
        """
        super().__init__(
            observation_space=reward_net.observation_space,
            action_space=reward_net.action_space,
        )
        self.reward_net = reward_net
        # if the reward net has potential shaping, we disable that for testing
        if isinstance(reward_net, reward_nets.ShapedRewardNet):
            self.test_reward_net = reward_net.base
        else:
            self.test_reward_net = reward_net
        self.entropy_weight = entropy_weight
        logging.info("Using AIRL")

    def logits_gen_is_high(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
        log_policy_act_prob: th.Tensor,
    ) -> th.Tensor:
        """Compute the discriminator's logits for each state-action sample.

        A high value corresponds to predicting generator, and a low value corresponds to
        predicting expert.

        Note that unlike the base class, `log_policy_act_prob` is required here.
        """
        reward_output_train = self.reward_net(state, action, next_state, done)
        # In Fu's AIRL paper (https://arxiv.org/pdf/1710.11248.pdf), the
        # discriminator output was given as exp(r_theta(s,a)) /
        # (exp(r_theta(s,a)) - log pi(a|s)), with a high value corresponding to
        # expert and a low value corresponding to generator (the opposite of
        # our convention).
        #
        # Observe that sigmoid(log pi(a|s) - r(s,a)) = exp(log pi(a|s) -
        # r(s,a)) / (1 + exp(log pi(a|s) - r(s,a))). If we multiply through by
        # exp(r(s,a)), we get pi(a|s) / (pi(a|s) + exp(r(s,a))). This is the
        # original AIRL discriminator expression with reversed logits to match
        # our convention of low = expert and high = generator (like GAIL).
        return log_policy_act_prob - reward_output_train

    def reward_test(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
    ) -> th.Tensor:
        # Uses the unshaped base reward net when shaping was configured.
        rew = self.test_reward_net(state, action, next_state, done)
        assert rew.shape == state.shape[:1]
        return rew

    def reward_train(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
    ) -> th.Tensor:
        """Compute train reward.

        Computed reward does *not* include an entropy bonus. Instead, the
        entropy bonus should be added directly to PPO, SAC, etc."""
        rew = self.reward_net(state, action, next_state, done)
        assert rew.shape == state.shape[:1]
        return rew
class ActObsMLP(nn.Module):
    """Simple MLP that takes an action and observation and produces a single
    output."""

    def __init__(
        self, action_space: gym.Space, observation_space: gym.Space, **mlp_kwargs
    ):
        super().__init__()
        # Input width is the flattened observation plus flattened action.
        obs_dim = preprocessing.get_flattened_obs_dim(observation_space)
        act_dim = preprocessing.get_flattened_obs_dim(action_space)
        # Caller-provided kwargs may override in_size/out_size.
        full_kwargs = {"in_size": obs_dim + act_dim, "out_size": 1, **mlp_kwargs}
        self.mlp = networks.build_mlp(**full_kwargs)

    def forward(self, obs: th.Tensor, acts: th.Tensor) -> th.Tensor:
        # Concatenate along the feature axis, then drop the trailing unit dim.
        combined = th.cat((obs, acts), dim=1)
        return self.mlp(combined).squeeze(1)
class DiscrimNetGAIL(DiscrimNet):
    """The discriminator to use for GAIL."""

    def __init__(
        self,
        observation_space: gym.Space,
        action_space: gym.Space,
        discrim_net: Optional[nn.Module] = None,
        normalize_images: bool = False,
    ):
        """Construct discriminator network.

        Args:
            observation_space: observation space for this environment.
            action_space: action space for this environment:
            discrim_net: a Torch module that takes an observation and action
                tensor as input, then computes the logits for GAIL.
            normalize_images: should image observations be normalized to [0, 1]?
        """
        super().__init__(
            observation_space=observation_space,
            action_space=action_space,
            normalize_images=normalize_images,
        )
        if discrim_net is None:
            # Default architecture: small MLP over flattened (obs, act) input.
            self.discriminator = ActObsMLP(
                action_space=action_space,
                observation_space=observation_space,
                hid_sizes=(32, 32),
            )
        else:
            self.discriminator = discrim_net
        # Capitalization fixed for consistency with the "Using AIRL" message
        # logged by DiscrimNetAIRL.
        logging.info("Using GAIL")

    def logits_gen_is_high(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
        log_policy_act_prob: Optional[th.Tensor] = None,
    ) -> th.Tensor:
        """Compute the discriminator's logits for each state-action sample.

        A high value corresponds to predicting generator, and a low value corresponds to
        predicting expert.

        `next_state`, `done` and `log_policy_act_prob` are unused by GAIL's
        discriminator, which conditions on (state, action) only.
        """
        logits = self.discriminator(state, action)
        return logits

    def reward_test(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
    ) -> th.Tensor:
        """Test-time reward; identical to the train-time reward for GAIL."""
        rew = self.reward_train(state, action, next_state, done)
        assert rew.shape == state.shape[:1]
        return rew

    def reward_train(
        self,
        state: th.Tensor,
        action: th.Tensor,
        next_state: th.Tensor,
        done: th.Tensor,
    ) -> th.Tensor:
        """Train-time reward: -log sigmoid(logits).

        Since high logits indicate generator-like transitions, this reward is
        large when the discriminator believes the transition is expert-like.
        """
        logits = self.logits_gen_is_high(state, action, next_state, done)
        rew = -F.logsigmoid(logits)
        assert rew.shape == state.shape[:1]
        return rew
| mit | 27a670e100a7901b8dc3db6341f2ac34 | 32.884615 | 97 | 0.58359 | 3.900696 | false | false | false | false |
valohai/valohai-cli | valohai_cli/utils/matching.py | 1 | 2213 | import re
from typing import Any, Iterable, List, Optional, Union
import click
from valohai_cli.utils import force_text
from valohai_cli.utils.cli_utils import join_with_style
def match_prefix(choices: Iterable[Any], value: str, return_unique: bool = True) -> Union[List[Any], Any, None]:
    """
    Match `value` in `choices` by case-insensitive prefix matching.

    If the exact `value` is in `choices` (and `return_unique` is set),
    that exact value is returned as-is.

    :param choices: Choices to match in. May be non-string; `str()` is called on them if not.
    :param value: The value to use for matching.
    :param return_unique: If only one option was found, return it; otherwise return None.
                          If this is not true, all of the filtered choices are returned.
    :return: list, object or none; see the `return_unique` option.
    """
    # Exact hits short-circuit prefix matching in unique mode.
    if return_unique and value in choices:
        return value
    pattern = re.compile(f'^{re.escape(value)}', re.I)
    matched = [candidate for candidate in choices if pattern.match(force_text(candidate))]
    if not return_unique:
        return matched
    # Unique mode: a single match wins, anything else is no match.
    return matched[0] if len(matched) == 1 else None
def match_from_list_with_error(
    options: Iterable[str],
    input: str,
    noun: str = "object",
    param_hint: Optional[str] = None,
) -> str:
    """
    Wrap `match_prefix` and raise a pretty CLI error if no match is found, or if multiple matches are found.
    """
    # Exact matches never raise.
    if input in options:
        return input
    candidates = match_prefix(options, input, return_unique=False)
    if not candidates:
        styled_options = join_with_style(sorted(options), bold=True)
        raise click.BadParameter(
            f'"{input}" is not a known {noun} (try one of {styled_options})', param_hint=param_hint)
    if len(candidates) > 1:
        # Ambiguous prefix: list both the matches and all known options.
        styled_matches = join_with_style(sorted(candidates), bold=True)
        styled_options = join_with_style(sorted(options), bold=True)
        raise click.BadParameter(
            f'"{input}" is ambiguous.\nIt matches {styled_matches}.\nKnown {noun} are {styled_options}.',
            param_hint=param_hint,
        )
    return str(candidates[0])
| mit | 40d064dc56cced971ac4e9e33f0c3c73 | 39.236364 | 112 | 0.659738 | 3.639803 | false | false | false | false |
jazzband/website | jazzband/projects/views.py | 1 | 23433 | import hashlib
import hmac
import logging
import os
import shutil
import tempfile
from datetime import datetime
import delegator
import requests
from flask import (
Blueprint,
abort,
current_app,
flash,
jsonify,
make_response,
redirect,
request,
send_from_directory,
url_for,
)
from flask.views import MethodView
from flask_login import current_user, login_required
from packaging.version import parse as parse_version
from pkg_resources import safe_name
from requests.exceptions import HTTPError
from sqlalchemy import desc, nullsfirst, nullslast
from sqlalchemy.sql.expression import func
from werkzeug.security import safe_join
from werkzeug.utils import secure_filename
from ..account import github
from ..account.forms import LeaveForm
from ..auth import current_user_is_roadie
from ..decorators import templated
from ..exceptions import eject
from ..members.decorators import member_required
from ..tasks import spinach
from .forms import DeleteForm, ReleaseForm, UploadForm
from .models import Project, ProjectMembership, ProjectUpload
from .tasks import send_new_upload_notifications, update_upload_ordering
# All project views are mounted below /projects.
projects = Blueprint("projects", __name__, url_prefix="/projects")

logger = logging.getLogger(__name__)

MAX_FILESIZE = 60 * 1024 * 1024  # 60M
MAX_SIGSIZE = 8 * 1024  # 8K
# ASCII-armor header a PGP signature upload must begin with.
SIGNATURE_START = b"-----BEGIN PGP SIGNATURE-----"
# Digest algorithm used to build an upload's on-disk storage path.
PATH_HASHER = "sha256"

# Sort criteria selectable via the ?sorter= query parameter of the index view.
DEFAULT_SORTER = func.random()
SORTERS = {
    "uploads": Project.uploads_count,
    "members": Project.membership_count,
    "watchers": Project.subscribers_count,
    "stargazers": Project.stargazers_count,
    "forks": Project.forks_count,
    "issues": Project.open_issues_count,
    "name": Project.name,
    "random": DEFAULT_SORTER,
}
DEFAULT_ORDER = "desc"
@projects.route("")
@templated()
def index():
    """Render the project listing, sortable via ?sorter= and ?order=."""
    requested_sorter = request.args.get("sorter", None)
    # An unknown (or absent) sorter falls back to random ordering.
    initial_sorting = requested_sorter not in SORTERS
    sorter = "random" if initial_sorting else requested_sorter
    criterion = SORTERS.get(sorter, DEFAULT_SORTER)
    order = request.args.get("order", None)
    if order == DEFAULT_ORDER:
        criterion = nullslast(desc(criterion))
    else:
        criterion = nullsfirst(criterion)
    projects = Project.query.filter(Project.is_active.is_(True)).order_by(criterion)
    return {
        "projects": projects,
        "sorter": sorter,
        "initial_sorting": initial_sorting,
        "order": order,
        "DEFAULT_ORDER": DEFAULT_ORDER,
    }
class ProjectMixin:
    """Mixin resolving the addressed project before dispatch.

    `dispatch_request` looks up the active project named in the URL and
    stores it on ``self.project`` for the HTTP method handlers.
    """

    def project_query(self, name):
        # Only active projects are addressable; everything else 404s.
        return Project.query.filter(Project.is_active.is_(True), Project.name == name)

    def project_name(self, *args, **kwargs):
        # The URL rule supplies the project name as a keyword argument.
        name = kwargs.get("name")
        if not name:
            abort(404)
        return name

    def redirect_to_project(self):
        return redirect(url_for("projects.detail", name=self.project.name))

    def dispatch_request(self, *args, **kwargs):
        name = self.project_name(*args, **kwargs)
        self.project = self.project_query(name).first_or_404()
        return super().dispatch_request(*args, **kwargs)
class DetailView(ProjectMixin, MethodView):
    """
    A view to show the details of a project.
    """

    methods = ["GET"]
    decorators = [templated()]

    def get(self, name):
        # Explicit upload ordering wins; ties fall back to version ordering.
        uploads = self.project.uploads.order_by(
            ProjectUpload.ordering.desc(), ProjectUpload.version.desc()
        )
        versions = {upload.version for upload in uploads}
        return {
            "project": self.project,
            "uploads": uploads,
            # PEP 440 aware sort, newest version first.
            "versions": sorted(versions, key=parse_version, reverse=True),
        }
class JoinView(ProjectMixin, MethodView):
    """
    A view to join a project team.
    """

    methods = ["GET"]
    decorators = [
        member_required(message="You currently can't join this project"),
        login_required,
    ]

    def get(self, name):
        # Add the user to the GitHub team first; mirror locally on success.
        response = github.join_team(self.project.team_slug, current_user.login)
        if response and response.status_code == 200:
            membership = self.project.membership.filter(
                ProjectMembership.user_id == current_user.id,
            ).first()
            if not membership:
                # create a new project membership
                membership = ProjectMembership(
                    user_id=current_user.id, project_id=self.project.id
                )
                membership.save()
            flash(f"You have joined the {self.project.name} team.")
        else:
            flash(f"Something went wrong while joining the {self.project.name} team.")
        return self.redirect_to_project()
class LeaveView(ProjectMixin, MethodView):
    """
    A view to leave a project team.
    """

    methods = ["GET", "POST"]
    decorators = [member_required(), login_required, templated()]

    def get(self, name):
        # Only current team members see the confirmation form.
        if not self.project.user_is_member(current_user):
            flash(f"You're not a member of {self.project.name} at the moment.")
            return self.redirect_to_project()
        return {
            "project": self.project,
            "leave_form": LeaveForm(),
        }

    def post(self, name):
        if not self.project.user_is_member(current_user):
            flash(f"You're not a member of {self.project.name} at the moment.")
            return self.redirect_to_project()
        form = LeaveForm()
        if form.validate_on_submit():
            # Remove from the GitHub team first; mirror locally on success.
            response = github.leave_team(self.project.team_slug, current_user.login)
            if response and response.status_code == 204:
                membership = self.project.membership.filter(
                    ProjectMembership.user_id == current_user.id,
                ).first()
                if membership:
                    membership.delete()
                flash(
                    f"You have been removed from the {self.project.name} team. "
                    f"See you soon!"
                )
            else:
                flash(
                    f"Leaving the {self.project.name} team failed. "
                    f"Please try again or open a ticket for the roadies."
                )
            return self.redirect_to_project()
        return {
            "project": self.project,
            "leave_form": form,
        }
class UploadView(ProjectMixin, MethodView):
    """
    A view to show the details of a project and also handling file uploads
    via Twine/distutils.
    """

    methods = ["POST"]

    def check_authentication(self):
        """
        Authenticate a request using a database lookup.

        Expects HTTP Basic auth with username "jazzband" and an active
        project credential key as the password.
        """
        if request.authorization is None:
            return False
        # the upload killswitch
        if not current_app.config["UPLOAD_ENABLED"]:
            return False
        if request.authorization.username != "jazzband":
            return False
        return self.project.credentials.filter_by(
            is_active=True, key=request.authorization.password
        ).scalar()

    def post(self, name):
        # NOTE(review): MAX_FILESIZE is defined at module level but not
        # enforced here — confirm enforcement happens upstream (web server).
        if not self.check_authentication():
            response = make_response("", 401)
            response.headers["WWW-Authenticate"] = 'Basic realm="Jazzband"'
            return response
        # distutils "helpfully" substitutes unknown, but "required" values
        # with the string "UNKNOWN". This is basically never what anyone
        # actually wants so we'll just go ahead and delete anything whose
        # value is UNKNOWN.
        form_copy = request.form.copy()
        unknown_found = False
        for key, value in request.form.items():
            if value == "UNKNOWN":
                unknown_found = True
                form_copy.pop(key)
        if unknown_found:
            request.form = form_copy
        form = UploadForm(meta={"csrf": False})
        validation_order = ["name", "version", "content"]
        if not form.validate_on_submit():
            # Report the most significant failing field first.
            for field_name in validation_order:
                if field_name in form.errors:
                    break
            else:
                # no break: fall back to the alphabetically first error field
                field_name = sorted(form.errors.keys())[0]
            eject(
                400,
                description="%s: %s" % (field_name, ", ".join(form.errors[field_name])),
            )
        # the upload FileStorage
        upload_data = form.content.data
        if upload_data is None:
            eject(400, description="Upload payload does not have a file.")
        upload_filename = secure_filename(upload_data.filename)
        # Make sure that our filename matches the project that it is being
        # uploaded to.
        prefix = safe_name(self.project.name).lower()
        if not safe_name(upload_filename).lower().startswith(prefix):
            eject(
                400,
                description="The filename for %r must start with %r."
                % (self.project.name, prefix),
            )
        # Fail if a project upload already exists
        if ProjectUpload.query.filter_by(
            filename=upload_filename, project_id=self.project.id
        ).scalar():
            eject(400, description="File already exists.")
        # Store file uploads and calculate hashes
        with tempfile.TemporaryDirectory() as tmpdir:
            upload_path = os.path.join(tmpdir, upload_filename)
            upload_data.stream.seek(0)
            upload_data.save(upload_path)
            # Buffer the entire file onto disk, checking the hash of the file
            # as we go along.
            with open(upload_path, "rb") as upload_file:
                file_hashes = {
                    "md5": hashlib.md5(),
                    "sha256": hashlib.sha256(),
                    "blake2_256": hashlib.blake2b(digest_size=256 // 8),
                }
                # NOTE(review): chunk size is 8096 bytes; 8192 (8 KiB) was
                # likely intended — harmless either way.
                for chunk in iter(lambda: upload_file.read(8096), b""):
                    for hasher in file_hashes.values():
                        hasher.update(chunk)
            # Take our hash functions and compute the final hashes for them
            # now.
            file_hashes = {
                method: file_hash.hexdigest().lower()
                for method, file_hash in file_hashes.items()
            }
            # Actually verify the digests that we've gotten. We're going to use
            # hmac.compare_digest even though we probably don't actually need
            # to because it's better safe than sorry. In the case of multiple
            # digests we expect them all to be given.
            hash_comparisons = [
                hmac.compare_digest(
                    getattr(form, "%s_digest" % digest_name).data.lower(), digest_value
                )
                for digest_name, digest_value in file_hashes.items()
                if getattr(form, "%s_digest" % digest_name).data
            ]
            if not all(hash_comparisons):
                eject(
                    400,
                    description="The digest supplied does not match a digest "
                    "calculated from the uploaded file.",
                )
            # Also buffer the entire signature file to disk.
            signature = form.gpg_signature.data
            signature_filename = upload_filename + ".asc"
            if signature:
                signature_path = os.path.join(tmpdir, signature_filename)
                signature.stream.seek(0)
                signature.save(signature_path)
                if os.path.getsize(signature_path) > MAX_SIGSIZE:
                    eject(400, description="Signature too large.")
                # Check whether signature is ASCII armored
                with open(signature_path, "rb") as signature_file:
                    if not signature_file.read().startswith(SIGNATURE_START):
                        eject(400, description="PGP signature is not ASCII armored.")
            version = form.version.data
            upload = ProjectUpload(
                version=version,
                project=self.project,
                # e.g. acme/2coffee12345678123123123123123123
                path=safe_join(self.project.name, file_hashes[PATH_HASHER]),
                filename=upload_filename,
                size=os.path.getsize(upload_path),
                md5_digest=file_hashes["md5"],
                sha256_digest=file_hashes["sha256"],
                blake2_256_digest=file_hashes["blake2_256"],
                form_data=request.form,
                user_agent=request.user_agent.string,
                remote_addr=request.remote_addr,
            )
            # make the storage path directory /app/uploads/acme
            os.makedirs(os.path.dirname(upload.full_path), exist_ok=True)
            # move the uploaded file to storage path directory
            shutil.move(upload_path, upload.full_path)
            # copy the uploaded signature file to storage path directory
            if signature:
                shutil.move(signature_path, upload.full_path + ".asc")
            # write to database
            upload.save()
        # Notify project members and recompute upload ordering in background.
        spinach.schedule(send_new_upload_notifications, self.project.id)
        spinach.schedule(update_upload_ordering, self.project.id)
        return "OK"
class UploadActionView(ProjectMixin, MethodView):
    """Base view for actions on a single upload of a project."""

    decorators = [login_required]

    def dispatch_request(self, *args, **kwargs):
        name = self.project_name(*args, **kwargs)
        self.project = self.project_query(name).first_or_404()
        self.upload = self.project.uploads.filter_by(
            id=kwargs.get("upload_id")
        ).first_or_404()
        # ProjectMixin.dispatch_request resolves self.project again before
        # delegating to the method handler.
        return super().dispatch_request(*args, **kwargs)
class UploadMembersActionView(UploadActionView):
    """Upload action restricted to projects the current user is a member of."""

    def project_query(self, name):
        projects = super().project_query(name)
        return projects.filter(Project.membership.any(user=current_user))
class UploadLeadsActionView(UploadMembersActionView):
    """Upload action restricted to project leads (roadies bypass the check)."""

    def project_query(self, name):
        projects = super().project_query(name)
        if current_user_is_roadie():
            return projects
        return projects.filter(
            Project.membership.any(is_lead=True),
        )
class UploadDownloadView(UploadMembersActionView):
    """Serve an upload's file from storage as an attachment (members only)."""

    methods = ["GET"]

    def get(self, name, upload_id):
        max_age = current_app.get_send_file_max_age(self.upload.full_path)
        path, filename = os.path.split(self.upload.full_path)
        return send_from_directory(
            path,
            filename,
            max_age=max_age,
            as_attachment=True,
            # Expose the original upload filename, not the hashed storage name.
            download_name=self.upload.filename,
            etag=False,
            conditional=True,
        )
class UploadFormDataView(UploadMembersActionView):
    """Expose the raw metadata form submitted with an upload as JSON."""

    methods = ["GET"]

    def get(self, name, upload_id):
        return jsonify(self.upload.form_data)
class UploadReleaseView(UploadLeadsActionView):
    """Release an upload to PyPI via twine (leads only)."""

    methods = ["GET", "POST"]
    decorators = UploadLeadsActionView.decorators + [templated()]

    def validate_upload(self):
        """Cross-check the released file against PyPI's JSON metadata.

        Returns a list of human-readable error strings; empty means the
        release looks consistent.
        """
        errors = []
        try:
            # check pypi if file was added, check sha256 digest, size and
            # filename
            pypi_response = requests.get(self.upload.project.pypi_json_url)
            pypi_response.raise_for_status()
            data = pypi_response.json()
        except HTTPError:
            # in case there was a network issue with getting the JSON
            # data from PyPI
            error = "Error while validating upload"
            logger.error(error, exc_info=True)
            errors.append(error)
        except ValueError:
            # or something was wrong about the returned JSON data
            error = "Error while parsing response from PyPI during validation"
            logger.error(error, exc_info=True)
            errors.append(error)
        except Exception:
            error = "Unknown error"
            logger.error(error, exc_info=True)
            errors.append(error)
        else:
            # next check the data for what we're looking for
            releases = data.get("releases", {})
            release_files = releases.get(self.upload.version, [])
            if release_files:
                for release_file in release_files:
                    release_filename = release_file.get("filename", None)
                    if release_filename is None:
                        # NOTE(review): this error is logged but not appended
                        # to `errors`, unlike every other failure branch —
                        # confirm whether that is intentional.
                        error = "No file found in PyPI validation response."
                        logger.error(error, extra={"stack": True})
                    if release_filename == self.upload.filename:
                        digests = release_file.get("digests", {})
                        if digests:
                            md5_digest = digests.get("md5", None)
                            if md5_digest and md5_digest != self.upload.md5_digest:
                                error = (
                                    f"MD5 hash of {self.upload.filename} does "
                                    f"not match hash returned by PyPI."
                                )
                                errors.append(error)
                                logger.error(error, extra={"stack": True})
                            sha256_digest = digests.get("sha256", None)
                            if (
                                sha256_digest
                                and sha256_digest != self.upload.sha256_digest
                            ):
                                error = (
                                    f"SHA256 hash of {self.upload.filename} "
                                    f"does not match hash returned by PyPI."
                                )
                                errors.append(error)
                                logger.error(error, extra={"stack": True})
                        else:
                            error = f"No digests for file {self.upload.filename}"
                            errors.append(error)
                            logger.error(error, extra={"stack": True})
                # NOTE(review): if no release file matches our filename, no
                # error is recorded at all — verify this is intended.
            else:
                error = f"No released files found for upload " f"{self.upload.filename}"
                errors.append(error)
                logger.error(error, extra={"stack": True})
        return errors

    def post(self, name, upload_id):
        if not current_app.config["RELEASE_ENABLED"]:
            # NOTE(review): this only flashes/logs; processing continues —
            # confirm whether the killswitch should also abort the release.
            message = "Releasing is currently out of service"
            flash(message)
            logging.info(message)
        if self.upload.released_at:
            flash(
                f"The upload {self.upload.filename} has already been released "
                f"and can't be released again."
            )
            return self.redirect_to_project()
        release_form = ReleaseForm(project_name=self.project.name)
        context = {
            "release_form": release_form,
            "project": self.project,
            "upload": self.upload,
        }
        if release_form.validate_on_submit():
            # copy path to new tmp directory
            with tempfile.TemporaryDirectory() as tmpdir:
                upload_path = os.path.join(tmpdir, self.upload.filename)
                shutil.copy(self.upload.full_path, upload_path)
                # run twine upload against copied upload file
                twine_run = delegator.run(f"twine upload {upload_path}")
                if twine_run.return_code == 0:
                    errors = self.validate_upload()
                    release_form.add_global_error(*errors)
                    if not errors:
                        # create ProjectRelease object with reference to project
                        self.upload.released_at = datetime.utcnow()
                        # write to database
                        self.upload.save()
                        message = f"You've successfully released {self.upload} to PyPI."
                        flash(message)
                        logger.info(message)
                        return self.redirect_to_project()
                else:
                    error = f"Release of {self.upload} failed."
                    release_form.add_global_error(error)
                    logger.error(
                        error, extra={"data": {"out": twine_run.out, "err": twine_run.err}}
                    )
                context.update({"twine_run": twine_run, "upload": self.upload})
        return context

    def get(self, name, upload_id):
        if self.upload.released_at:
            message = (
                f"The upload {self.upload} has already been released "
                f"and can't be released again."
            )
            flash(message)
            logger.info(message)
            return self.redirect_to_project()
        release_form = ReleaseForm(project_name=self.project.name)
        return {
            "project": self.project,
            "release_form": release_form,
            "upload": self.upload,
        }
class UploadDeleteView(UploadLeadsActionView):
    """Delete an unreleased upload (leads only)."""

    methods = ["GET", "POST"]
    decorators = UploadLeadsActionView.decorators + [templated()]

    def get(self, name, upload_id):
        # Released uploads are immutable.
        if self.upload.released_at:
            message = (
                f"The upload {self.upload} has already been "
                f"released and can't be deleted."
            )
            flash(message)
            logger.info(message)
            return self.redirect_to_project()
        return {
            "project": self.project,
            "upload": self.upload,
            "delete_form": DeleteForm(project_name=self.project.name),
        }

    def post(self, name, upload_id):
        if self.upload.released_at:
            message = (
                f"The upload {self.upload} has already been "
                f"released and can't be deleted."
            )
            flash(message)
            logger.error(message, extra={"stack": True})
            return self.redirect_to_project()
        delete_form = DeleteForm(project_name=self.project.name)
        context = {
            "delete_form": delete_form,
            "project": self.project,
            "upload": self.upload,
        }
        if delete_form.validate_on_submit():
            self.upload.delete()
            message = f"You've successfully deleted the upload {self.upload}."
            flash(message)
            logger.info(message)
            return self.redirect_to_project()
        else:
            # Re-render the confirmation form with validation errors.
            return context
# Route table for the blueprint, most specific rules first.
# /projects/test-project/1/delete
projects.add_url_rule(
    "/<name>/upload/<upload_id>/delete", view_func=UploadDeleteView.as_view("delete")
)
# /projects/test-project/1/data
projects.add_url_rule(
    "/<name>/upload/<upload_id>/formdata",
    view_func=UploadFormDataView.as_view("formdata"),
)
# /projects/test-project/1/download
projects.add_url_rule(
    "/<name>/upload/<upload_id>/download",
    view_func=UploadDownloadView.as_view("download"),
)
# /projects/test-project/1/release
projects.add_url_rule(
    "/<name>/upload/<upload_id>/release", view_func=UploadReleaseView.as_view("release")
)
# /projects/test-project/join
projects.add_url_rule("/<name>/join", view_func=JoinView.as_view("join"))
# /projects/test-project/leave
projects.add_url_rule("/<name>/leave", view_func=LeaveView.as_view("leave"))
# /projects/test-project/upload
projects.add_url_rule("/<name>/upload", view_func=UploadView.as_view("upload"))
# /projects/test-project
projects.add_url_rule("/<name>", view_func=DetailView.as_view("detail"))
| mit | c60047afc6fe06ec4f332cadb4e4af40 | 35.217929 | 88 | 0.570008 | 4.334628 | false | false | false | false |
jazzband/website | jazzband/projects/models.py | 1 | 7862 | import os
import time
from datetime import datetime
from uuid import uuid4
from flask import current_app, render_template
from flask_login import current_user
from sqlalchemy import func, orm
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy_utils import aggregated, generic_repr
from werkzeug.security import safe_join
from ..account import github
from ..auth import current_user_is_roadie
from ..db import postgres as db
from ..members.models import User
from ..mixins import Syncable
@generic_repr("id", "name")
class Project(db.Model, Syncable):
    """A Jazzband project, mirroring a GitHub repository and its team."""

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False, index=True)
    # Slug of the matching GitHub team, set by create_team().
    team_slug = db.Column(db.String(255))
    # PEP 426/503-style normalized name, computed in the database.
    normalized_name = orm.column_property(func.normalize_pep426_name(name))
    description = db.Column(db.Text)
    html_url = db.Column(db.String(255))
    # Counters synced from GitHub repository metadata.
    subscribers_count = db.Column(db.SmallInteger, default=0, nullable=False)
    stargazers_count = db.Column(db.SmallInteger, default=0, nullable=False)
    forks_count = db.Column(db.SmallInteger, default=0, nullable=False)
    open_issues_count = db.Column(db.SmallInteger, default=0, nullable=False)
    is_active = db.Column(db.Boolean, default=True, nullable=False, index=True)
    # URL of the issue opened by create_transfer_issue().
    transfer_issue_url = db.Column(db.String(255))
    membership = db.relationship("ProjectMembership", backref="project", lazy="dynamic")
    credentials = db.relationship(
        "ProjectCredential", backref="project", lazy="dynamic"
    )
    uploads = db.relationship(
        "ProjectUpload",
        backref="project",
        lazy="dynamic",
        order_by=lambda: ProjectUpload.ordering.desc().nullslast(),
    )
    created_at = db.Column(db.DateTime, nullable=True)
    updated_at = db.Column(db.DateTime, nullable=True)
    pushed_at = db.Column(db.DateTime, nullable=True)

    __tablename__ = "projects"
    __table_args__ = (
        db.Index("release_name_idx", "name"),
        db.Index("release_name_is_active_idx", "name", "is_active"),
    )

    def __str__(self):
        return self.name

    @aggregated("uploads", db.Column(db.SmallInteger))
    def uploads_count(self):
        # Maintained automatically by sqlalchemy_utils' aggregation.
        return db.func.count("1")

    @aggregated("membership", db.Column(db.SmallInteger))
    def membership_count(self):
        return db.func.count("1")

    @property
    def current_user_is_member(self):
        # Roadies count as members of every project.
        if not current_user:
            return False
        elif not current_user.is_authenticated:
            return False
        elif current_user_is_roadie():
            return True
        else:
            return self.user_is_member(current_user)

    @property
    def current_user_is_lead(self):
        # Roadies count as leads of every project.
        if not current_user:
            return False
        elif not current_user.is_authenticated:
            return False
        elif current_user_is_roadie():
            return True
        else:
            return current_user.id in [
                user.id for user in self.lead_members.options(orm.load_only("id"))
            ]

    @property
    def all_members(self):
        # Active users with a membership row for this project.
        return (
            User.active_members()
            .join(User.projects_memberships)
            .filter(ProjectMembership.project_id == self.id)
        )

    @property
    def nonlead_members(self):
        return self.all_members.filter(ProjectMembership.is_lead.is_(False))

    @property
    def lead_members(self):
        return self.all_members.filter(ProjectMembership.is_lead.is_(True))

    def user_is_member(self, user):
        return user.id in [
            member.id for member in self.all_members.options(orm.load_only("id"))
        ]

    @property
    def pypi_json_url(self):
        """
        The URL to fetch JSON data from PyPI, using a timestamp to work-around
        the PyPI CDN cache.
        """
        return (
            f"https://pypi.org/pypi/{self.normalized_name}/json?time={int(time.time())}"
        )

    def create_transfer_issue(self, assignees, **data):
        # Open a GitHub issue in the project repo and remember its URL if it
        # was created in the expected repository.
        issue_response = github.new_project_issue(
            repo=self.name,
            data={
                "title": render_template("hooks/project-title.txt", **data),
                "body": render_template("hooks/project-body.txt", **data),
                "assignees": assignees,
            },
        )
        issue_data = issue_response.json()
        issue_url = issue_data.get("html_url")
        if issue_url.startswith(f"https://github.com/jazzband/{self.name}"):
            self.transfer_issue_url = issue_url
            self.save()

    def create_team(self):
        # Create the matching GitHub team and store its slug on success.
        team_response = github.create_project_team(self.name)
        if team_response and team_response.status_code == 201:
            team_data = team_response.json()
            self.team_slug = team_data.get("slug")
            self.save()
        return team_response
@generic_repr("id", "project_id", "is_active", "key")
class ProjectCredential(db.Model):
    """A per-project secret: a random UUID ``key`` that can be toggled via
    ``is_active``; presumably used to authenticate package uploads for the
    project -- confirm against the upload view."""
    id = db.Column(db.Integer, primary_key=True)
    project_id = db.Column(db.Integer, db.ForeignKey("projects.id"))
    is_active = db.Column(db.Boolean, default=True, nullable=False, index=True)
    key = db.Column(UUID(as_uuid=True), default=uuid4)
    __tablename__ = "project_credentials"
    __table_args__ = (db.Index("release_key_is_active_idx", "key", "is_active"),)
    def __str__(self):
        # Render as the 32-character hex form of the UUID key.
        return self.key.hex
@generic_repr("id", "user_id", "project_id", "is_lead")
class ProjectMembership(db.Model, Syncable):
    """Join table between users and projects, with a lead flag."""
    id = db.Column("id", db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"))
    project_id = db.Column(db.Integer, db.ForeignKey("projects.id"))
    joined_at = db.Column(db.DateTime, default=datetime.utcnow)
    # Leads get extra permissions; see Project.lead_members and friends.
    is_lead = db.Column(db.Boolean, default=False, nullable=False, index=True)
    __tablename__ = "project_memberships"
    def __str__(self):
        return f"User: {self.user}, Project: {self.project}"
@generic_repr("id", "project_id", "filename")
class ProjectUpload(db.Model):
    """A single uploaded release file for a project, stored on disk under
    the configured upload root."""
    id = db.Column(db.Integer, primary_key=True)
    project_id = db.Column(db.Integer, db.ForeignKey("projects.id"))
    version = db.Column(db.Text, index=True)
    # Storage-relative path and the original upload file name.
    path = db.Column(db.Text, unique=True, index=True)
    filename = db.Column(db.Text, unique=True, index=True)
    # SQL-computed convenience column: file name of the detached signature.
    signaturename = orm.column_property(filename + ".asc")
    size = db.Column(db.Integer)
    # Content digests; SHA256 and BLAKE2-256 are enforced to be 64 hex
    # characters by the check constraints in __table_args__ below.
    md5_digest = db.Column(db.Text, unique=True, nullable=False)
    sha256_digest = db.Column(db.Text, unique=True, nullable=False)
    blake2_256_digest = db.Column(db.Text, unique=True, nullable=False)
    uploaded_at = db.Column(db.DateTime, default=datetime.utcnow)
    released_at = db.Column(db.DateTime, nullable=True)
    notified_at = db.Column(db.DateTime, nullable=True, index=True)
    # Raw form data and request metadata captured at upload time.
    form_data = db.Column(JSONB)
    user_agent = db.Column(db.Text)
    remote_addr = db.Column(db.Text)
    # Explicit sort order; Project.uploads orders by this, descending.
    ordering = db.Column(db.Integer, default=0)
    __tablename__ = "project_uploads"
    __table_args__ = (
        db.CheckConstraint("sha256_digest ~* '^[A-F0-9]{64}$'"),
        db.CheckConstraint("blake2_256_digest ~* '^[A-F0-9]{64}$'"),
        db.Index("project_uploads_project_version", "project_id", "version"),
    )
    @property
    def full_path(self):
        # build storage path, e.g.
        # /app/uploads/acme/2coffee12345678123123123123123123
        return safe_join(current_app.config["UPLOAD_ROOT"], self.path)
    @property
    def signature_path(self):
        # Path of the detached GPG signature, stored next to the upload.
        return self.full_path + ".asc"
    def __str__(self):
        return self.filename
@db.event.listens_for(ProjectUpload, "after_delete")
def delete_upload_file(mapper, connection, target):
    """Remove the upload's file (and its detached signature, if present)
    from disk after the database row has been deleted."""
    # Mirror the existence check used for the signature so a file that is
    # already gone does not abort the delete event with FileNotFoundError.
    if os.path.exists(target.full_path):
        os.remove(target.full_path)
    if os.path.exists(target.signature_path):
        os.remove(target.signature_path)
| mit | f323ed307ec8dbf97732c587ab65598a | 34.414414 | 88 | 0.641567 | 3.527142 | false | false | false | false |
valohai/valohai-cli | tests/commands/deployment/test_create_version.py | 1 | 1948 | import uuid
import pytest
from tests.commands.run_test_utils import RunAPIMock
from valohai_cli.commands.deployment.create_version import create_version
from valohai_cli.ctx import get_project
from valohai_cli.models.project import Project
@pytest.mark.parametrize('name', (None, '666'))
def test_create_version(runner, logged_in_and_linked, monkeypatch, name):
    """End-to-end test of the `create-version` CLI command against the
    mocked run API, with and without an explicit version name."""
    commit_identifier = 'f' * 16
    def mock_resolve_commits(mock_self, *, commit_identifier):
        # Pretend the commit always exists so no real API call is needed.
        return [{'identifier': commit_identifier}]
    monkeypatch.setattr(Project, 'resolve_commits', mock_resolve_commits)
    project = get_project()
    apimock_kwargs = {'deployment_version_name': name} if name else {}
    apimock = RunAPIMock(project.id, commit_identifier, **apimock_kwargs)
    args = ['-d', 'main-deployment', '-e', 'greet', '-c', commit_identifier]
    if name:
        args.extend(['-n', name])
    with apimock:
        # No matching deployment?
        output = runner.invoke(create_version, ['-d', 'not-found-deployment', '-e', 'greet', '-c', commit_identifier]).output
        assert '"not-found-deployment" is not a known deployment (try one of main-deployment)' in output
        output = runner.invoke(create_version, args).output
        assert 'Success!' in output
    # Endpoint with required files
    endpoint = 'predict-digit'
    args = ['-d', 'main-deployment', '-e', endpoint, '-c', commit_identifier]
    if name:
        args.extend(['-n', name])
    with apimock:
        # Missing required file option should be reported in the output.
        output = runner.invoke(create_version, args).output
        assert f'--{endpoint}-model' in output
        # A non-UUID value is rejected as an invalid datum id...
        args.extend([f'--{endpoint}-model', 'potat.h5'])
        output = runner.invoke(create_version, args).output
        assert "Not valid datum id: potat.h5" in output
        # ...while a proper UUID is accepted.
        datum_id = str(uuid.uuid4())
        args.extend([f'--{endpoint}-model', datum_id])
        output = runner.invoke(create_version, args).output
        assert 'Success!' in output
| mit | 2fa42a15ce44e40a1c7e2f290b135281 | 37.196078 | 125 | 0.661191 | 3.746154 | false | true | false | false |
valohai/valohai-cli | tests/stub_git.py | 1 | 2737 | import os
import tempfile
from pathlib import Path
from subprocess import check_call, check_output
from typing import Any, List, Optional, Tuple, Union
from valohai_cli.utils import get_random_string
class StubGit:
    """Minimal wrapper around a throwaway git repository for tests.

    All commands shell out to the real `git` binary inside a temporary
    directory; a sanity check ensures the directory really is temporary.
    """
    def __init__(self, path: Any):
        if type(path) not in {Path, str}:
            # support for the legacy `_pytest._py.path.LocalPath` generated by the `tmpdir` fixture
            path = str(path)
        if isinstance(path, str):
            path = Path(path)
        path = path.resolve(strict=False)
        # we don't manage the temporary directory lifecycle so expect it to exist
        assert path.exists(), 'stub git root directory path does not exist'
        # a sanity check that we are not potentially mangling some real git repository
        from_env = os.environ.get('PYTEST_DEBUG_TEMPROOT')
        temp_root = Path(from_env or tempfile.gettempdir()).resolve()
        assert temp_root in path.parents, 'stub git not inside temporary directory'
        self.dir_path = path
    @property
    def dir_str(self) -> str:
        # `pathlib.Path` is frequently needed as a classical string
        return str(self.dir_path)
    def init(self):
        """Initialize the repository and configure a dummy committer."""
        check_call('git init', cwd=self.dir_str, shell=True)
        check_call('git config user.name Robot', cwd=self.dir_str, shell=True)
        check_call('git config user.email robot@example.com', cwd=self.dir_str, shell=True)
    def write(
        self,
        path: Union[Path, str],
        *,
        content: Optional[str] = None,
        add: bool = True,
    ):
        """Write *content* (random text by default) to *path* inside the
        repo, creating parent directories, and optionally `git add` it."""
        if content is None:
            content = get_random_string()
        absolute_path, relative_path = self._pathify(path)
        absolute_path.parent.mkdir(parents=True, exist_ok=True)
        absolute_path.write_text(content, 'utf8')
        if add:
            self.add(relative_path)
    def add(self, path: Union[Path, str]):
        """Stage a single file."""
        _, relative_path = self._pathify(path)
        check_call(f'git add {relative_path}', cwd=self.dir_str, shell=True)
    def add_all(self):
        """Stage everything in the working tree."""
        check_call('git add .', cwd=self.dir_str, shell=True)
    def commit(self, message: str = 'bugfix'):
        """Commit the staged changes with the given message."""
        check_call(f'git commit -m "{message}"', cwd=self.dir_str, shell=True)
    def log(self) -> List[str]:
        # full git commit SHAs in reverse chronological order (the latest first)
        raw = check_output('git log --pretty=format:%H', cwd=self.dir_str, shell=True)
        return raw.decode('utf-8').split()
    def _pathify(self, path: Union[Path, str]) -> Tuple[Path, Path]:
        """Return (absolute, repo-relative) variants of *path*."""
        if isinstance(path, str):
            path = Path(path)
        absolute = (self.dir_path / path).resolve()
        return absolute, absolute.relative_to(self.dir_path)
| mit | b18d43e1777b57c1e8d05a700eef6e7a | 34.089744 | 99 | 0.625868 | 3.688679 | false | false | false | false |
valohai/valohai-cli | valohai_cli/utils/file_size_format.py | 1 | 1054 | # Adapted from Jinja2. Jinja2 is (c) 2017 by the Jinja Team, licensed under the BSD license.
from typing import Union
# Unit prefixes beyond "Bytes"; index i corresponds to base ** (i + 2).
binary_prefixes = ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']
decimal_prefixes = ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']


def filesizeformat(value: Union[int, float], binary: bool = False) -> str:
    """Format the value like a 'human-readable' file size (i.e. 13 kB,
    4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
    Giga, etc.), if the second parameter is set to `True` the binary
    prefixes are used (Mebi, Gibi).
    """
    size = float(value)
    base = 1024 if binary else 1000
    prefixes = binary_prefixes if binary else decimal_prefixes
    if size == 1:
        return '1 Byte'
    elif size < base:
        # Render whole bytes without a trailing ".0" (matches the Jinja2
        # filter this was adapted from, which casts to int here).
        return f'{int(size)} Bytes'
    else:
        for i, prefix in enumerate(prefixes):
            unit = base ** (i + 2)
            if size < unit:
                return f'{base * size / unit:.1f} {prefix}'
        # Value exceeds the largest known prefix: reuse the last one.
        return f'{base * size / unit:.1f} {prefix}'
| mit | 26fad729270a2190d17667cf6275356d | 39.538462 | 92 | 0.594877 | 3.346032 | false | false | false | false |
jazzband/website | jazzband/admin.py | 3 | 2880 | from flask import redirect, request, session, url_for
from flask_admin import Admin, AdminIndexView, expose
from flask_admin.contrib import sqla
from flask_login import current_user
from .account.models import OAuth
from .auth import current_user_is_roadie
from .db import postgres
from .members.models import EmailAddress, User
from .projects.models import (
Project,
ProjectCredential,
ProjectMembership,
ProjectUpload,
)
class JazzbandModelView(sqla.ModelView):
    """Base admin model view: only roadies may access it; everyone else is
    sent through the GitHub login flow."""
    def is_accessible(self):
        return current_user_is_roadie()
    def inaccessible_callback(self, name, **kwargs):
        # redirect to login page if user doesn't have access
        session["next"] = request.url
        return redirect(url_for("github.login"))
class JazzbandAdminIndexView(AdminIndexView):
    """Admin landing page that forces unauthenticated users to log in."""
    @expose("/")
    def index(self):
        if not current_user.is_authenticated:
            # Stash the target URL so the login flow can redirect back here.
            session["next"] = request.url
            return redirect(url_for("github.login"))
        return super().index()
class UserAdmin(JazzbandModelView):
    """Admin view for users, with inline OAuth tokens, email addresses and
    project memberships."""
    column_searchable_list = ("login",)
    column_filters = (
        "is_member",
        "is_roadie",
        "is_banned",
        "is_restricted",
        "has_2fa",
        "joined_at",
        "left_at",
        "consented_at",
        "profile_consent",
        "org_consent",
        "cookies_consent",
        "age_consent",
    )
    inline_models = (OAuth, EmailAddress, ProjectMembership)
class OAuthAdmin(JazzbandModelView):
    """Admin view for stored OAuth tokens."""
    column_searchable_list = ("token", "user_id")
    column_filters = ("created_at", "provider")
class EmailAddressAdmin(JazzbandModelView):
    """Admin view for user email addresses."""
    column_searchable_list = ("email",)
    column_filters = ("verified", "primary")
class ProjectAdmin(JazzbandModelView):
    """Admin view for projects, with inline credentials, uploads and
    memberships."""
    column_searchable_list = ("name", "description")
    column_filters = ("is_active", "created_at", "updated_at", "pushed_at")
    inline_models = (ProjectCredential, ProjectUpload, ProjectMembership)
class ProjectUploadAdmin(JazzbandModelView):
    """Admin view for project release uploads."""
    column_searchable_list = ("filename", "sha256_digest")
    column_filters = ("uploaded_at", "released_at")
class ProjectMembershipAdmin(JazzbandModelView):
    """Admin view for project memberships."""
    column_filters = ("is_lead", "user_id", "project_id", "joined_at")
    column_searchable_list = ("project_id", "user_id")
def init_app(app):
    """Attach the Flask-Admin interface to *app* and register one admin
    view per model."""
    admin = Admin(
        app,
        name="jazzband",
        template_mode="bootstrap4",
        index_view=JazzbandAdminIndexView(),
    )
    for model_cls, admin_cls in (
        (User, UserAdmin),
        (OAuth, OAuthAdmin),
        (EmailAddress, EmailAddressAdmin),
        (Project, ProjectAdmin),
        (ProjectMembership, ProjectMembershipAdmin),
        (ProjectUpload, ProjectUploadAdmin),
        (ProjectCredential, JazzbandModelView),
    ):
        admin.add_view(admin_cls(model_cls, postgres.session))
| mit | b2e798f0519007ab045b2e49d721038f | 27.514851 | 75 | 0.658333 | 3.68758 | false | false | false | false |
jazzband/website | jazzband/account/blueprint.py | 1 | 11651 | import logging
from flask import current_app, flash
from flask_dance.consumer import OAuth2ConsumerBlueprint, oauth_error
from flask_dance.consumer.requests import BaseOAuth2Session, OAuth2Session
from flask_dance.consumer.storage.sqla import SQLAlchemyStorage
from flask_login import current_user, login_user
from sentry_sdk import capture_message, configure_scope
from urlobject import URLObject
from werkzeug.utils import cached_property
from ..cache import cache
from ..db import postgres as db
from ..exceptions import RateLimit
from .models import OAuth
logger = logging.getLogger(__name__)
# Connected to flask-dance's oauth_error signal.
@oauth_error.connect
def github_error(blueprint, error, error_description=None, error_uri=None):
    """A GitHub API error handler that pushes the error to Sentry
    and shows a flash message to the user.
    """
    if error:
        # Attach the extra OAuth error details to the Sentry event.
        with configure_scope() as scope:
            scope.set_extra("error_description", error_description)
            scope.set_extra("error_uri", error_uri)
            capture_message(f"Error during OAUTH found: {error}")
        flash(
            f"OAuth error from Github ({error}): {error_description}", category="error"
        )
class GitHubSessionMixin:
    """A requests session mixin for GitHub that implements currently:

    - rate limit handling (by raising an exception when it happens)
    - pagination by the additional all_pages parameter
    """
    def request(self, method, url, data=None, headers=None, all_pages=False, **kwargs):
        """Perform the request; with ``all_pages=True`` follow the Link
        headers and return the merged JSON payload instead of a response."""
        response = super().request(
            method=method, url=url, data=data, headers=headers, **kwargs
        )
        if response.status_code == 403:
            # GitHub signals exhausted rate limits with 403 plus the
            # X-RateLimit-Remaining header.
            ratelimit_remaining = response.headers.get("X-RateLimit-Remaining")
            if ratelimit_remaining:
                try:
                    if int(ratelimit_remaining) < 1:
                        raise RateLimit(response=response)
                except ValueError:
                    # Non-numeric header value; deliberately ignored.
                    pass
        if all_pages:
            result = response.json()
            while response.links.get("next"):
                response = super().request(
                    method=method, url=url, data=data, headers=headers, **kwargs
                )
                body = response.json()
                # Merge list payloads directly; search-style payloads keep
                # their items under an "items" key.
                if isinstance(body, list):
                    result += body
                elif isinstance(body, dict) and "items" in body:
                    result["items"] += body["items"]
            return result
        else:
            return response
class GitHubSession(GitHubSessionMixin, OAuth2Session):
    """A custom GitHub session that implements a bunch of GitHub
    API specific functionality (e.g. pagination and rate limit handling)
    """
class AdminGitHubSession(GitHubSessionMixin, BaseOAuth2Session):
    """A custom GitHub session class that uses the blueprint's
    admin access token.
    """
    def __init__(self, blueprint=None, base_url=None, *args, **kwargs):
        # Authenticate every request with the org admin token instead of a
        # per-user OAuth token.
        token = {"access_token": blueprint.admin_access_token}
        super().__init__(token=token, *args, **kwargs)
        self.blueprint = blueprint
        self.base_url = URLObject(base_url)
    def request(self, method, url, data=None, headers=None, **kwargs):
        """Resolve relative URLs against the API base URL and pass along
        the blueprint's client credentials."""
        if self.base_url:
            url = self.base_url.relative(url)
        return super().request(
            method=method,
            url=url,
            data=data,
            headers=headers,
            client_id=self.blueprint.client_id,
            client_secret=self.blueprint.client_secret,
            **kwargs,
        )
class GitHubBlueprint(OAuth2ConsumerBlueprint):
    """
    A custom OAuth2 blueprint that implements some of our
    specific GitHub API functions.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(
            base_url="https://api.github.com/",
            authorization_url="https://github.com/login/oauth/authorize",
            token_url="https://github.com/login/oauth/access_token",
            session_class=GitHubSession,
            storage=SQLAlchemyStorage(
                OAuth, db.session, user=current_user, user_required=False, cache=cache
            ),
            *args,
            **kwargs,
        )
        # Map blueprint attributes to app config keys; resolved when
        # load_config() runs.
        self.from_config.update(
            {
                "client_id": "GITHUB_OAUTH_CLIENT_ID",
                "client_secret": "GITHUB_OAUTH_CLIENT_SECRET",
                "scope": "GITHUB_SCOPE",
                "members_team_slug": "GITHUB_MEMBERS_TEAM_SLUG",
                "roadies_team_slug": "GITHUB_ROADIES_TEAM_SLUG",
                "admin_access_token": "GITHUB_ADMIN_TOKEN",
                "org_name": "GITHUB_ORG_NAME",
            }
        )
    def make_setup_state(self, app, options, first_registration=False):
        # load config when the blueprint is registered
        if first_registration:
            with app.app_context():
                self.load_config()
        return super().make_setup_state(
            app, options, first_registration=first_registration
        )
    @cached_property
    def admin_session(self):
        """A custom session using the organization's admin permissions."""
        # FIXME investigate why config is not loading on cli invocation
        self.load_config()
        # NOTE(review): this bare string looks like a misplaced docstring;
        # it is a no-op expression statement here.
        "This is a custom session using the organization's admin permissions."
        return AdminGitHubSession(
            client_id=self._client_id,
            client=self.client,
            auto_refresh_url=self.auto_refresh_url,
            auto_refresh_kwargs=self.auto_refresh_kwargs,
            scope=self.scope,
            state=self.state,
            blueprint=self,
            base_url=self.base_url,
            **self.kwargs,
        )
    def join_organization(self, user_login):
        """
        Adds the GitHub user with the given login to the members team.
        """
        return self.join_team(self.members_team_slug, user_login)
    def leave_organization(self, user_login):
        """
        Remove the GitHub user with the given login from the org.

        https://docs.github.com/en/rest/reference/orgs#remove-an-organization-member
        """
        return self.admin_session.delete(
            f"orgs/{self.org_name}/memberships/{user_login}"
        )
    def get_project_team(self, slug):
        """
        Get the information about the team with the given name.
        """
        return self.admin_session.get(f"orgs/{self.org_name}/teams/{slug}")
    def get_members_team_id(self):
        """
        Fetches the GitHub team id of the Members team.
        """
        member_team_response = self.admin_session.get(
            f"orgs/{self.org_name}/teams/{self.members_team_slug}"
        )
        member_team_response.raise_for_status()
        member_team_data = member_team_response.json()
        return member_team_data.get("id")
    def create_project_team(self, name):
        """
        Create a project team in the members team with the given name.

        Docs: https://docs.github.com/en/rest/reference/teams#create-a-team
        """
        members_team_id = self.get_members_team_id()
        if not members_team_id:
            logger.error("Couldn't load member team details!", extra={"name": name})
            return
        return self.admin_session.post(
            f"orgs/{self.org_name}/teams",
            json={
                "name": name,
                "description": f"Team for {name}",
                "repo_names": [f"{self.org_name}/{name}"],
                "parent_team_id": members_team_id,
                "privacy": "closed",  # meaning that all org members can see it
            },
            headers={"Accept": "application/vnd.github.v3+json"},
        )
    def join_team(self, team_slug, username):
        """
        Add the GitHub user with the given login to the given team slug.

        https://docs.github.com/en/rest/reference/teams#add-or-update-team-membership-for-a-user
        """
        return self.admin_session.put(
            f"orgs/{self.org_name}/teams/{team_slug}/memberships/{username}",
            headers={"Accept": "application/vnd.github.v3+json"},
        )
    def leave_team(self, team_slug, username):
        """
        Remove the GitHub user with the given login from the given team slug.

        https://docs.github.com/en/rest/reference/teams#remove-team-membership-for-a-user
        """
        return self.admin_session.delete(
            f"orgs/{self.org_name}/teams/{team_slug}/memberships/{username}",
            headers={"Accept": "application/vnd.github.v3+json"},
        )
    def get_projects(self):
        """Return all public org repos, each annotated with the number of
        subscribers (one extra API request per repo)."""
        # https://docs.github.com/en/rest/reference/repos#list-organization-repositories
        projects = self.admin_session.get(
            f"orgs/{self.org_name}/repos?type=public", all_pages=True
        )
        projects_with_subscribers = []
        for project in projects:
            project_name = project["name"]
            # https://docs.github.com/en/rest/reference/activity#list-watchers
            watchers = self.admin_session.get(
                f"repos/{self.org_name}/{project_name}/subscribers", all_pages=True
            )
            project["subscribers_count"] = len(watchers)
            projects_with_subscribers.append(project)
        return projects_with_subscribers
    def get_teams(self):
        """Return the child teams of the members team."""
        # https://docs.github.com/en/rest/reference/teams#list-child-teams
        return self.admin_session.get(
            f"orgs/{self.org_name}/teams/{self.members_team_slug}/teams",
            all_pages=True,
            headers={"Accept": "application/vnd.github.hellcat-preview+json"},
        )
    def get_roadies(self):
        """Return the members of the roadies team."""
        return self.admin_session.get(
            f"orgs/{self.org_name}/teams/{self.roadies_team_slug}/members",
            all_pages=True,
        )
    def get_members(self, team_slug=None):
        """Return team members annotated with is_member/is_roadie/has_2fa
        flags; defaults to the members team."""
        if team_slug is None:
            team_slug = self.members_team_slug
        without_2fa_ids = {user["id"] for user in self.get_without_2fa()}
        roadies_ids = {roadie["id"] for roadie in self.get_roadies()}
        all_members = self.admin_session.get(
            f"orgs/{self.org_name}/teams/{team_slug}/members", all_pages=True
        )
        members = []
        for member in all_members:
            member["is_member"] = True
            member["is_roadie"] = member["id"] in roadies_ids
            member["has_2fa"] = member["id"] not in without_2fa_ids
            members.append(member)
        return members
    def get_emails(self, user):
        """
        Gets the verified email addresses of the authenticated GitHub user.
        """
        # Temporarily log the user in inside a fake request so the
        # user-scoped session has a token to work with.
        with current_app.test_request_context("/"):
            login_user(user)
            return self.session.get("user/emails", all_pages=True)
    def get_without_2fa(self):
        """
        Gets the organization members without Two Factor Auth enabled.
        """
        return self.admin_session.get(
            f"orgs/{self.org_name}/members?filter=2fa_disabled", all_pages=True
        )
    def is_member(self, username):
        """
        Checks if the GitHub user with the given login is member of the org.
        """
        # Deliberately best-effort: any API failure is treated as
        # "not a member" rather than propagating the error.
        try:
            response = self.admin_session.get(
                f"orgs/{self.org_name}/members/{username}"
            )
            return response.status_code == 204
        except Exception:
            return False
    def new_roadies_issue(self, data):
        """Open an issue on the roadies' help repository."""
        return self.new_project_issue(repo="help", org="jazzband", data=data)
    def new_project_issue(self, repo, data, org="jazzband"):
        """Open an issue with the given payload on org/repo."""
        return self.admin_session.post(f"repos/{org}/{repo}/issues", json=data)
| mit | a6223da1699a77d8fbb2f2c7c18cf6d7 | 35.638365 | 96 | 0.59291 | 4.072352 | false | false | false | false |
jazzband/website | jazzband/db.py | 3 | 7581 | from collections import deque
from contextlib import contextmanager
from flask_redis import FlaskRedis
from flask_sqlalchemy import Model, SQLAlchemy
from walrus import Walrus
from .exceptions import Rollback
class JazzbandModel(Model):
    """Base model class adding Django-style convenience helpers."""
    @classmethod
    def update_or_create(cls, defaults=None, commit=True, **kwargs):
        """Look up a row by **kwargs and update it with *defaults*, or
        create a new row from both; returns ``(instance, created)``."""
        if defaults is None:
            defaults = {}
        instance = cls.query.filter_by(**kwargs).first()
        if instance:
            for arg, value in defaults.items():
                setattr(instance, arg, value)
            if commit:
                postgres.session.commit()
            return instance, False
        else:
            params = kwargs.copy()
            params.update(defaults)
            instance = cls(**params)
            postgres.session.add(instance)
            if commit:
                postgres.session.commit()
            return instance, True
    def save(self, commit=True):
        """Add this instance to the session and optionally commit."""
        postgres.session.add(self)
        if commit:
            postgres.session.commit()
        return self
    def delete(self, commit=True):
        """Delete this instance via the session and optionally commit."""
        postgres.session.delete(self)
        if commit:
            postgres.session.commit()
        return self
class JazzbandSQLAlchemy(SQLAlchemy):
    """SQLAlchemy extension with explicit transaction-boundary support."""
    def init_app(self, app):
        super().init_app(app)
        app.config.setdefault("SQLALCHEMY_NESTED_TRANSACTION", False)
        app.config.setdefault("SQLALCHEMY_ISOLATE_TRANSACTION", True)
        # dispose of engine to fix issue with forks
        # https://virtualandy.wordpress.com/2019/09/04/a-fix-for-operationalerror-psycopg2-operationalerror-ssl-error-decryption-failed-or-bad-record-mac/
        with app.app_context():
            self.engine.dispose()
    @contextmanager
    def transaction(self, isolate=None, nested=None, **kwargs):
        """Safely commits if no errors, will rollback otherwise.

        This is preferably used with PEP 343 `with` statement, for example:

        with db.transaction():
            db.session.execute(...)

        If `execute` succeeds without any exception, `commit` will be emitted;
        or else if any exception (but ``Rollback`` in certain cases, see below)
        is raised within the `with` block, or even if the implicit `commit`
        fails, a `rollback` is guaranteed at the end of the `with` block.

        In some cases, you may want to manually rollback the transaction from
        inside. Generally you can simply raise any exception to abort the
        transaction; alternatively there is a special exception ``Rollback``,
        with which you can choose to let ``db.transaction`` handle the
        exception. Please see ``Rollback`` for more information.

        By default when `autocommit=False`, there is always an open transaction
        (not necessarily DB-level) associated with any session object. In such
        case, it is a common usage that, DB access can be performed anytime
        whenever there is a session, and do commit or rollback manually
        whenever they are needed. This is convenient and widely adopted, but it
        creates a mess over transaction boundary - what **exactly** is included
        when commit happens? So by default, when entering a `db.transaction`
        block, a `rollback` is executed when the situation is not clear, in
        order to isolate the transaction boundary to precisely where it is
        defined.

        And of course this behavior can be altered, globally by setting config
        `SQLALCHEMY_ISOLATE_TRANSACTION` to `False`, or explicitly by setting
        `isolate=False` on a `db.transaction` call. Latter overrides former.

        Though `autocommit=True` is no recommended by SQLAlchemy, it is anyway
        supported here. Entering `db.transaction` ensures a `begin`, the rest
        stays all the same as described above.

        Transactions can be nested, without setting the parameter `nested`,
        which is used to select between the two different nested transaction
        implementations - subtransaction (default) or savepoint. With
        subtransactions, it is programed to guarantee that only all
        subtransactions are committed can the DB transaction be committed; any
        rollback in subtransactions - even if the exception is captured - will
        lead the DB transaction to be rolled back (not immediately), commit
        attempts on parent transactions shall simply fail. Differently with
        savepoint, one can rollback to a savepoint and carry on in the same
        transaction, and possibly commit it. Nested transactions are suitable
        for cases when a reused function needs to guarantee its logic is at
        least atomic when called separately, while it can also be embed into
        another transaction as a whole.

        The default nested transaction implementation is not **nested** - a
        keyword reserved by SQLAlchemy to indicate using savepoint, reused here
        to follow the same custom. It can be globally set to use savepoint by
        setting config `SQLALCHEMY_NESTED_TRANSACTION` to `True`;
        alternatively it can be overriden by setting `nested` parameter on a
        `db.transaction` call.

        :param isolate:
            `True`: guarantee transaction boundary;
            `False`: do not rollback at the beginning;
            `None`(default): follow config `SQLALCHEMY_ISOLATE_TRANSACTION`
        :param nested:
            `True`: use savepoint for nested transaction;
            `False`: use subtransaction for nested transaction;
            `None`(default): follow config `SQLALCHEMY_NESTED_TRANSACTION`
        :param kwargs:
            additional key-value pairs to be set in the transaction-local
        :return: a PEP 343 context object to be used by `with`
        """
        session = self.session()
        # Lazily create the per-session stack of transaction-local dicts.
        try:
            stack = session._tx_stack
        except AttributeError:
            stack = session._tx_stack = deque()
        is_root = len(stack) == 0
        if is_root:
            nested = False
            item = {}
        else:
            # Nested transactions inherit (a copy of) the parent's locals.
            item = stack[-1].copy()
        if nested is None:
            nested = self.get_app().config["SQLALCHEMY_NESTED_TRANSACTION"]
        if isolate is None:
            isolate = self.get_app().config["SQLALCHEMY_ISOLATE_TRANSACTION"]
        item.update(kwargs)
        stack.append(item)
        try:
            if is_root and not session.autocommit:
                if isolate:
                    # Discard any pending state so the boundary is clean.
                    session.rollback()
            else:
                session.begin(subtransactions=True, nested=nested)
            try:
                yield
                session.commit()
            except Rollback as e:
                session.rollback()
                if e.propagate:
                    raise
                if e.propagate is None and not nested:
                    raise
            except Exception:
                session.rollback()
                raise
        finally:
            stack.pop()
    @property
    def tx_local(self):
        """A shared dict object associated with current (nested) transaction"""
        stack = getattr(self.session(), "_tx_stack", None)
        if stack:
            return stack[-1]
    @property
    def root_tx_local(self):
        """A shared dict object associated with current DB transaction"""
        stack = getattr(self.session(), "_tx_stack", None)
        if stack:
            return stack[0]
# Module-level singletons: the PostgreSQL ORM handle and the Redis client
# (backed by walrus' Walrus provider).
postgres = JazzbandSQLAlchemy(model_class=JazzbandModel)
redis = FlaskRedis.from_custom_provider(Walrus)
| mit | 3b7cc4b2abf71892a2f2c66b99a538f3 | 39.978378 | 154 | 0.634085 | 4.594545 | false | false | false | false |
jazzband/website | jazzband/account/forms.py | 3 | 1593 | from flask_login import current_user
from flask_wtf import FlaskForm
from wtforms import ValidationError, validators
from wtforms.fields import BooleanField, StringField
# Shared validation message for all mandatory consent checkboxes.
CONSENT_ERROR_MESSAGE = "Your consent is required to continue."
class ConsentForm(FlaskForm):
    """Consent form; every checkbox must be ticked to proceed."""
    profile = BooleanField(
        "I consent to fetching, processing and storing my profile "
        "data which is fetched from the GitHub API.",
        validators=[validators.DataRequired(CONSENT_ERROR_MESSAGE)],
    )
    org = BooleanField(
        "I consent to fetching, processing and storing my GitHub "
        "organization membership data which is fetched from the "
        "GitHub API.",
        validators=[validators.DataRequired(CONSENT_ERROR_MESSAGE)],
    )
    identify = BooleanField(
        "I consent to using browser cookies for identifying me for "
        "account features such as logging in and content personalizations "
        "such as rendering my account dashboard.",
        validators=[validators.DataRequired(CONSENT_ERROR_MESSAGE)],
    )
    age = BooleanField(
        "I'm at least 16 years old or – if not – have permission by a "
        "parent (or legal guardian) to proceed.",
        validators=[validators.DataRequired(CONSENT_ERROR_MESSAGE)],
    )
class LeaveForm(FlaskForm):
    """Confirmation form for leaving: the user must retype their login."""
    login = StringField("Your GitHub Login", validators=[validators.DataRequired()])
    def validate_login(self, field):
        # Compare against the logged-in user's GitHub login.
        if field.data != current_user.login:
            raise ValidationError(
                "Sorry, but that GitHub login doesn't match our records."
            )
| mit | e2d361ce3ec1034fbffbfd32a47596a7 | 37.756098 | 84 | 0.685337 | 4.553009 | false | false | false | false |
valohai/valohai-cli | valohai_cli/commands/update_check.py | 1 | 2113 | import sys
from typing import Optional
import click
import requests
import valohai_cli
from valohai_cli.messages import warn
@click.command()
def update_check() -> None:
    """Compare the installed valohai-cli version with the latest PyPI
    release and report whether an upgrade is available."""
    data = get_pypi_info()
    current_version = valohai_cli.__version__
    latest_version = data['info']['version']
    click.echo(f'Your version of Valohai-CLI is {click.style(current_version, bold=True)}')
    click.echo(f'  The latest release on PyPI is {click.style(latest_version, bold=True)}')
    upgrade_status = determine_upgrade_status(current_version, latest_version)
    if upgrade_status == 'upgrade':
        click.secho(
            '\nGood news! An upgrade is available!\n'
            'Run (e.g.) `pip install -U valohai-cli` to install the new version.',
            bold=True,
            fg='green',
        )
        click.echo('Upgrade instructions may differ based on the method you\'ve installed the application with.')
        # Non-zero exit code signals "outdated" to calling scripts.
        sys.exit(1)
    elif upgrade_status == 'delorean':
        click.secho(
            '\nWhen this thing gets up to 88 mph... You seem to be running a version from the future!\n',
            bold=True,
            fg='cyan',
        )
    elif upgrade_status == 'current':
        click.echo('\nYou seem to be running the latest and greatest. Good on you!')
def determine_upgrade_status(current_version: str, latest_version: str) -> Optional[str]:
    """Compare the installed version against the latest PyPI release.

    :return: 'upgrade' if a newer release exists, 'delorean' if the local
             version is newer than the latest release, 'current' if they
             are equal, or None if the versions could not be compared.
    """
    try:
        parsed_current_version = _parse_version(current_version)
        parsed_latest_version = _parse_version(latest_version)
        if parsed_latest_version > parsed_current_version:
            return 'upgrade'
        elif parsed_latest_version < parsed_current_version:
            return 'delorean'
        elif parsed_latest_version == parsed_current_version:
            return 'current'
    except Exception as exc:
        warn(f'Unable to determine whether the version is older or newer ({exc})')
    return None


def _parse_version(version_string: str):
    """Parse *version_string* into a comparable object.

    Prefers distutils' LooseVersion for backwards compatibility, but falls
    back to a tuple of the dotted numeric components on Python 3.12+, where
    distutils was removed (PEP 632) and the import would previously make
    every comparison fail silently.
    """
    try:
        from distutils.version import LooseVersion
    except ImportError:
        return tuple(
            int(part) for part in version_string.strip().split('.') if part.isdigit()
        )
    return LooseVersion(version_string)
def get_pypi_info() -> dict:
    """Fetch and return the PyPI JSON metadata for valohai-cli."""
    response = requests.get('https://pypi.org/pypi/valohai-cli/json')
    response.raise_for_status()
    return dict(response.json())
| mit | f9995fd181afc1cd8200a33325858e3c | 35.431034 | 113 | 0.648841 | 3.920223 | false | true | false | false |
jazzband/website | jazzband/projects/forms.py | 3 | 4216 | import re
from flask_login import current_user
from flask_wtf import FlaskForm
from flask_wtf.file import FileAllowed, FileField, FileRequired
from packaging import version
from wtforms import StringField, SubmitField, ValidationError, validators
# Valid project name: starts and ends alphanumeric, may contain ._- inside.
_project_name_re = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# File extensions accepted for release uploads.
UPLOAD_EXTENSIONS = [
    "exe",
    "tar.gz",
    "bz2",
    "rpm",
    "deb",
    "zip",
    "tgz",
    "egg",
    "dmg",
    "msi",
    "whl",
]
def _validate_pep440_version(form, field):
    """WTForms inline validator: the field must be a valid public PEP 440
    version (no local version segment)."""
    parsed = version.parse(field.data)
    # Check that this version is a valid PEP 440 version at all.
    if not isinstance(parsed, version.Version):
        raise validators.ValidationError(
            "Must start and end with a letter or numeral and contain only "
            "ascii numeric and '.', '_' and '-'."
        )
    # Check that this version does not have a PEP 440 local segment attached
    # to it.
    if parsed.local is not None:
        raise validators.ValidationError("Cannot use PEP 440 local versions.")
class UploadForm(FlaskForm):
    """Form describing one uploaded release file and its metadata digests."""

    # Identity Project and Release
    name = StringField(
        validators=[
            validators.DataRequired(),
            validators.Regexp(
                _project_name_re,
                re.IGNORECASE,
                message=(
                    "Must start and end with a letter or numeral and contain "
                    "only ascii numeric and '.', '_' and '-'."
                ),
            ),
        ]
    )
    version = StringField(
        validators=[
            validators.DataRequired(),
            # Lookahead/lookbehind forbid leading/trailing whitespace.
            validators.Regexp(
                r"^(?!\s).*(?<!\s)$",
                message="Cannot have leading or trailing whitespace.",
            ),
            _validate_pep440_version,
        ]
    )
    content = FileField(
        validators=[
            FileRequired("Upload payload does not have a file."),
            FileAllowed(UPLOAD_EXTENSIONS, "Invalid file extension."),
        ]
    )
    gpg_signature = FileField(
        validators=[
            validators.Optional(),
            FileAllowed(["asc"], "Invalid file extension."),
        ]
    )
    md5_digest = StringField(validators=[validators.Optional()])
    sha256_digest = StringField(
        validators=[
            validators.Optional(),
            validators.Regexp(
                r"^[A-F0-9]{64}$",
                re.IGNORECASE,
                message="Must be a valid, hex encoded, SHA256 message digest.",
            ),
        ]
    )
    blake2_256_digest = StringField(
        validators=[
            validators.Optional(),
            validators.Regexp(
                r"^[A-F0-9]{64}$",
                re.IGNORECASE,
                message="Must be a valid, hex encoded, blake2 message digest.",
            ),
        ]
    )

    def validate_content(form, field):
        """Reject uploads whose name contains path separators."""
        # NOTE(review): field.data for a FileField is typically a FileStorage
        # object, not a string — confirm the membership test operates on the
        # intended value (possibly field.data.filename).
        if field.data and ("/" in field.data or "\\" in field.data):
            raise ValidationError("Cannot upload a file with '/' or '\\' in the name.")
class ProjectNameForm(FlaskForm):
    """Form that asks the user to retype a project name to confirm an action."""

    project_name = StringField("Project name", validators=[validators.DataRequired()])

    def __init__(self, project_name, *args, **kwargs):
        # The expected project name that the user input is checked against.
        self._project_name = project_name
        super().__init__(*args, **kwargs)

    def validate_project_name(self, field):
        """Fail validation unless the typed name matches exactly."""
        if field.data != self._project_name:
            raise ValidationError("Sorry, but the entered project name doesn't match.")
class TwoFactorAuthValidation:
    """Mixin that blocks form submission for users without GitHub 2FA enabled."""

    submit = SubmitField()

    def validate_submit(self, field):
        # current_user.has_2fa reflects the GitHub account's 2FA state.
        if not current_user.has_2fa:
            raise ValidationError(
                "Sorry, but to release the upload you need to have "
                "Two Factor Auth (2FA) enabled on GitHub."
            )
class ReleaseForm(TwoFactorAuthValidation, ProjectNameForm):
    """Confirmation form for releasing an upload; requires 2FA and name match."""

    submit = SubmitField("Release")

    def __init__(self, *args, **kwargs):
        # Errors that belong to the form as a whole rather than to one field.
        self.global_errors = []
        super().__init__(*args, **kwargs)

    def add_global_error(self, *messages):
        """Attach one or more form-level error messages."""
        self.global_errors.extend(messages)
class DeleteForm(TwoFactorAuthValidation, ProjectNameForm):
    """Confirmation form for deleting a project; requires 2FA and name match."""

    submit = SubmitField("Delete")
| mit | c6cceb25999ed9f723b7e8ee2a804e7b | 27.486486 | 87 | 0.571632 | 4.328542 | false | false | false | false |
jazzband/website | jazzband/members/tasks.py | 1 | 1612 | import logging
from datetime import timedelta
from spinach import Tasks
from ..account import github
from ..config import ONE_MINUTE
from ..db import postgres, redis
from .models import EmailAddress, User
# Module-level logger and the spinach task registry used by the decorators below.
logger = logging.getLogger(__name__)
tasks = Tasks()
@tasks.task(name="sync_members", periodicity=timedelta(minutes=30), max_retries=3)
def sync_members():
    """Periodically sync GitHub org members into the local User table."""
    # use a lock to make sure we don't run this multiple times
    with redis.lock("sync_members", ttl=ONE_MINUTE * 14):
        members_data = github.get_members()
        User.sync(members_data)
        # Users stored locally but no longer returned by GitHub have left the
        # org; flag them as non-members instead of deleting their rows.
        stored_ids = {user.id for user in User.query.all()}
        fetched_ids = {m["id"] for m in members_data}
        stale_ids = stored_ids - fetched_ids
        if stale_ids:
            User.query.filter(User.id.in_(stale_ids)).update(
                {"is_member": False}, "fetch"
            )
            postgres.session.commit()
@tasks.task(name="sync_email_addresses", max_retries=5)
def sync_email_addresses(user_id):
    """Fetch the GitHub email addresses of one user and sync them locally."""
    # .one() raises if the user does not exist — that is intentional.
    user = User.query.filter(User.id == user_id).one()
    if not user.access_token:
        raise ValueError(f"No access token for user {user.login}")
    logger.info(
        "Updating emails for user %s with access token %s..",
        user_id,
        user.access_token[:6],
    )
    # Materialize the address records, then stamp each one with the user id.
    email_addresses = list(github.get_emails(user))
    for record in email_addresses:
        record["user_id"] = user.id
    EmailAddress.sync(email_addresses, key="email")
    return email_addresses
| mit | e827ee75bbe276c5ae9498de57743452 | 27.280702 | 82 | 0.646402 | 3.638826 | false | false | false | false |
heyman/locust | locust/clients.py | 1 | 11870 | import re
import time
import requests
import six
from requests import Request, Response
from requests.auth import HTTPBasicAuth
from requests.exceptions import (InvalidSchema, InvalidURL, MissingSchema,
RequestException)
from six.moves.urllib.parse import urlparse, urlunparse
from . import events
from .exception import CatchResponseError, ResponseError
absolute_http_url_regexp = re.compile(r"^https?://", re.I)
class LocustResponse(Response):
    """requests.Response subclass that can carry a stored connection error."""

    def raise_for_status(self):
        # Re-raise a previously captured exception if one was attached to
        # this response; otherwise fall back to the normal HTTP status check.
        error = getattr(self, 'error', None)
        if error:
            raise error
        Response.raise_for_status(self)
class HttpSession(requests.Session):
    """
    Class for performing web requests and holding (session-) cookies between requests (in order
    to be able to log in and out of websites). Each request is logged so that locust can display
    statistics.

    This is a slightly extended version of `python-request <http://python-requests.org>`_'s
    :py:class:`requests.Session` class and mostly this class works exactly the same. However
    the methods for making requests (get, post, delete, put, head, options, patch, request)
    can now take a *url* argument that's only the path part of the URL, in which case the host
    part of the URL will be prepended with the HttpSession.base_url which is normally inherited
    from a Locust class' host property.

    Each of the methods for making requests also takes two additional optional arguments which
    are Locust specific and doesn't exist in python-requests. These are:

    :param name: (optional) An argument that can be specified to use as label in Locust's statistics instead of the URL path.
                 This can be used to group different URL's that are requested into a single entry in Locust's statistics.
    :param catch_response: (optional) Boolean argument that, if set, can be used to make a request return a context manager
                           to work as argument to a with statement. This will allow the request to be marked as a fail based on the content of the
                           response, even if the response code is ok (2xx). The opposite also works, one can use catch_response to catch a request
                           and then mark it as successful even if the response code was not (i.e 500 or 404).
    """

    def __init__(self, base_url, *args, **kwargs):
        super(HttpSession, self).__init__(*args, **kwargs)

        # Base URL that relative request paths are resolved against.
        self.base_url = base_url

        # Check for basic authentication
        parsed_url = urlparse(self.base_url)
        if parsed_url.username and parsed_url.password:
            netloc = parsed_url.hostname
            if parsed_url.port:
                netloc += ":%d" % parsed_url.port

            # remove username and password from the base_url
            self.base_url = urlunparse((parsed_url.scheme, netloc, parsed_url.path, parsed_url.params, parsed_url.query, parsed_url.fragment))
            # configure requests to use basic auth
            self.auth = HTTPBasicAuth(parsed_url.username, parsed_url.password)

    def _build_url(self, path):
        """ prepend url with hostname unless it's already an absolute URL """
        if absolute_http_url_regexp.match(path):
            return path
        else:
            return "%s%s" % (self.base_url, path)

    def request(self, method, url, name=None, catch_response=False, **kwargs):
        """
        Constructs and sends a :py:class:`requests.Request`.
        Returns :py:class:`requests.Response` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param name: (optional) An argument that can be specified to use as label in Locust's statistics instead of the URL path.
          This can be used to group different URL's that are requested into a single entry in Locust's statistics.
        :param catch_response: (optional) Boolean argument that, if set, can be used to make a request return a context manager
          to work as argument to a with statement. This will allow the request to be marked as a fail based on the content of the
          response, even if the response code is ok (2xx). The opposite also works, one can use catch_response to catch a request
          and then mark it as successful even if the response code was not (i.e 500 or 404).
        :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
        :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long in seconds to wait for the server to send data before giving up, as a float,
            or a (`connect timeout, read timeout <user/advanced.html#timeouts>`_) tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response content. Defaults to ``False``.
        :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
        :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
        """

        # prepend url with hostname unless it's already an absolute URL
        url = self._build_url(url)

        # store meta data that is used when reporting the request to locust's statistics
        request_meta = {}

        # set up pre_request hook for attaching meta data to the request object
        request_meta["method"] = method
        request_meta["start_time"] = time.time()

        response = self._send_request_safe_mode(method, url, **kwargs)

        # record the consumed time
        request_meta["response_time"] = (time.time() - request_meta["start_time"]) * 1000

        # If the request was redirected, report under the path of the first
        # request in the redirect chain (unless an explicit name was given).
        request_meta["name"] = name or (response.history and response.history[0] or response).request.path_url

        # get the length of the content, but if the argument stream is set to True, we take
        # the size from the content-length header, in order to not trigger fetching of the body
        if kwargs.get("stream", False):
            request_meta["content_size"] = int(response.headers.get("content-length") or 0)
        else:
            request_meta["content_size"] = len(response.content or b"")

        if catch_response:
            # Defer success/failure reporting to the caller via the context manager.
            response.locust_request_meta = request_meta
            return ResponseContextManager(response)
        else:
            if name:
                # Since we use the Exception message when grouping failures, in order to not get
                # multiple failure entries for different URLs for the same name argument, we need
                # to temporarily override the reponse.url attribute
                orig_url = response.url
                response.url = name
            try:
                response.raise_for_status()
            except RequestException as e:
                events.request_failure.fire(
                    request_type=request_meta["method"],
                    name=request_meta["name"],
                    response_time=request_meta["response_time"],
                    exception=e,
                )
            else:
                events.request_success.fire(
                    request_type=request_meta["method"],
                    name=request_meta["name"],
                    response_time=request_meta["response_time"],
                    response_length=request_meta["content_size"],
                )
            if name:
                # Restore the real URL after reporting.
                response.url = orig_url
            return response

    def _send_request_safe_mode(self, method, url, **kwargs):
        """
        Send an HTTP request, and catch any exception that might occur due to connection problems.

        Safe mode has been removed from requests 1.x.
        """
        try:
            return requests.Session.request(self, method, url, **kwargs)
        except (MissingSchema, InvalidSchema, InvalidURL):
            # Malformed URLs are programming errors — let them propagate.
            raise
        except RequestException as e:
            # Wrap connection-level failures in a dummy response so the caller
            # can report them as request failures instead of crashing.
            r = LocustResponse()
            r.error = e
            r.status_code = 0  # with this status_code, content returns None
            r.request = Request(method, url).prepare()
            return r
class ResponseContextManager(LocustResponse):
    """
    A Response class that also acts as a context manager that provides the ability to manually
    control if an HTTP request should be marked as successful or a failure in Locust's statistics

    This class is a subclass of :py:class:`Response <requests.Response>` with two additional
    methods: :py:meth:`success <locust.clients.ResponseContextManager.success>` and
    :py:meth:`failure <locust.clients.ResponseContextManager.failure>`.
    """

    # Set to True once success()/failure() has fired an event for this
    # response, so __exit__ does not report the same request twice.
    _is_reported = False

    def __init__(self, response):
        # copy data from response to this object
        self.__dict__ = response.__dict__

    def __enter__(self):
        return self

    def __exit__(self, exc, value, traceback):
        if self._is_reported:
            # if the user has already manually marked this response as failure or success
            # we can ignore the default haviour of letting the response code determine the outcome
            return exc is None

        if exc:
            if isinstance(value, ResponseError):
                self.failure(value)
            else:
                # Unexpected exception types are not swallowed.
                return False
        else:
            try:
                self.raise_for_status()
            except requests.exceptions.RequestException as e:
                self.failure(e)
            else:
                self.success()
        return True

    def success(self):
        """
        Report the response as successful

        Example::

            with self.client.get("/does/not/exist", catch_response=True) as response:
                if response.status_code == 404:
                    response.success()
        """
        events.request_success.fire(
            request_type=self.locust_request_meta["method"],
            name=self.locust_request_meta["name"],
            response_time=self.locust_request_meta["response_time"],
            response_length=self.locust_request_meta["content_size"],
        )
        self._is_reported = True

    def failure(self, exc):
        """
        Report the response as a failure.

        exc can be either a python exception, or a string in which case it will
        be wrapped inside a CatchResponseError.

        Example::

            with self.client.get("/", catch_response=True) as response:
                if response.content == b"":
                    response.failure("No data")
        """
        if isinstance(exc, six.string_types):
            exc = CatchResponseError(exc)

        events.request_failure.fire(
            request_type=self.locust_request_meta["method"],
            name=self.locust_request_meta["name"],
            response_time=self.locust_request_meta["response_time"],
            exception=exc,
        )
        self._is_reported = True
| mit | fa8324be771fc1c52811123c3ea146ad | 45.367188 | 147 | 0.619208 | 4.597211 | false | false | false | false |
heyman/locust | locust/contrib/fasthttp.py | 1 | 15632 | from __future__ import absolute_import
import chardet
import re
import six
import socket
from base64 import b64encode
from six.moves.urllib.parse import urlparse, urlunparse
from ssl import SSLError
from timeit import default_timer
if six.PY2:
    from cookielib import CookieJar
    class ConnectionRefusedError(Exception):
        # ConnectionRefusedError doesn't exist in python 2, so we'll
        # define a dummy class to avoid a NameError
        pass
else:
    from http.cookiejar import CookieJar
    # On Python 3 there is no separate unicode type; alias it to str so the
    # rest of the module can call unicode(...) on both versions.
    unicode = str

from gevent.timeout import Timeout
from geventhttpclient.useragent import UserAgent, CompatRequest, CompatResponse, ConnectionError
from geventhttpclient.response import HTTPConnectionClosed

from locust import events
from locust.core import Locust
from locust.exception import LocustError, CatchResponseError, ResponseError

# Monkey patch geventhttpclient.useragent.CompatRequest so that Cookiejar works with Python >= 3.3
# More info: https://github.com/requests/requests/pull/871
CompatRequest.unverifiable = False

# Regexp for checking if an absolute URL was specified
absolute_http_url_regexp = re.compile(r"^https?://", re.I)

# List of exceptions that can be raised by geventhttpclient when sending an HTTP request,
# and that should result in a Locust failure
FAILURE_EXCEPTIONS = (ConnectionError, ConnectionRefusedError, socket.error, \
        SSLError, Timeout, HTTPConnectionClosed)
def _construct_basic_auth_str(username, password):
"""Construct Authorization header value to be used in HTTP Basic Auth"""
if isinstance(username, str):
username = username.encode('latin1')
if isinstance(password, str):
password = password.encode('latin1')
return 'Basic ' + b64encode(b':'.join((username, password))).strip().decode("ascii")
class FastHttpLocust(Locust):
    """
    Represents an HTTP "user" which is to be hatched and attack the system that is to be load tested.

    The behaviour of this user is defined by the task_set attribute, which should point to a
    :py:class:`TaskSet <locust.core.TaskSet>` class.

    This class creates a *client* attribute on instantiation which is an HTTP client with support
    for keeping a user session between requests.
    """

    client = None
    """
    Instance of HttpSession that is created upon instantiation of Locust.
    The client support cookies, and therefore keeps the session between HTTP requests.
    """

    def __init__(self):
        super(FastHttpLocust, self).__init__()
        # host must be configured (class attribute or --host) before use.
        if self.host is None:
            raise LocustError("You must specify the base host. Either in the host attribute in the Locust class, or on the command line using the --host option.")
        # Only scheme + authority are allowed; paths/trailing slashes break URL joining.
        if not re.match(r"^https?://[^/]+$", self.host, re.I):
            raise LocustError("Invalid host (`%s`). The specified host string must be a base URL without a trailing slash. E.g. http://example.org" % self.host)

        self.client = FastHttpSession(base_url=self.host)
class FastHttpSession(object):
    """geventhttpclient-backed HTTP session that reports to Locust's statistics."""

    # Pre-computed Authorization header value, set when the base URL
    # contains username:password credentials.
    auth_header = None

    def __init__(self, base_url, **kwargs):
        self.base_url = base_url
        self.cookiejar = CookieJar()
        self.client = LocustUserAgent(max_retries=1, cookiejar=self.cookiejar, **kwargs)

        # Check for basic authentication
        parsed_url = urlparse(self.base_url)
        if parsed_url.username and parsed_url.password:
            netloc = parsed_url.hostname
            if parsed_url.port:
                netloc += ":%d" % parsed_url.port

            # remove username and password from the base_url
            self.base_url = urlunparse((parsed_url.scheme, netloc, parsed_url.path, parsed_url.params, parsed_url.query, parsed_url.fragment))
            # store authentication header (we construct this by using _basic_auth_str() function from requests.auth)
            self.auth_header = _construct_basic_auth_str(parsed_url.username, parsed_url.password)

    def _build_url(self, path):
        """ prepend url with hostname unless it's already an absolute URL """
        if absolute_http_url_regexp.match(path):
            return path
        else:
            return "%s%s" % (self.base_url, path)

    def _send_request_safe_mode(self, method, url, **kwargs):
        """
        Send an HTTP request, and catch any exception that might occur due to either
        connection problems, or invalid HTTP status codes
        """
        try:
            return self.client.urlopen(url, method=method, **kwargs)
        except FAILURE_EXCEPTIONS as e:
            # Attach the exception to a (possibly dummy) response object so
            # the caller can report it instead of crashing.
            if hasattr(e, "response"):
                r = e.response
            else:
                r = ErrorResponse()
            r.error = e
            return r

    def request(self, method, path, name=None, data=None, catch_response=False, stream=False, \
                headers=None, auth=None, **kwargs):
        """
        Send and HTTP request
        Returns :py:class:`locust.contrib.fasthttp.FastResponse` object.

        :param method: method for the new :class:`Request` object.
        :param path: Path that will be concatenated with the base host URL that has been specified.
            Can also be a full URL, in which case the full URL will be requested, and the base host
            is ignored.
        :param name: (optional) An argument that can be specified to use as label in Locust's
            statistics instead of the URL path. This can be used to group different URL's
            that are requested into a single entry in Locust's statistics.
        :param catch_response: (optional) Boolean argument that, if set, can be used to make a request
            return a context manager to work as argument to a with statement. This will allow the
            request to be marked as a fail based on the content of the response, even if the response
            code is ok (2xx). The opposite also works, one can use catch_response to catch a request
            and then mark it as successful even if the response code was not (i.e 500 or 404).
        :param data: (optional) Dictionary or bytes to send in the body of the request.
        :param headers: (optional) Dictionary of HTTP Headers to send with the request.
        :param auth: (optional) Auth (username, password) tuple to enable Basic HTTP Auth.
        :param stream: (optional) If set to true the response body will not be consumed immediately
            and can instead be consumed by accessing the stream attribute on the Response object.
            Another side effect of setting stream to True is that the time for downloading the response
            content will not be accounted for in the request time that is reported by Locust.
        """
        # prepend url with hostname unless it's already an absolute URL
        url = self._build_url(path)

        # store meta data that is used when reporting the request to locust's statistics
        request_meta = {}

        # set up pre_request hook for attaching meta data to the request object
        request_meta["method"] = method
        request_meta["start_time"] = default_timer()
        request_meta["name"] = name or path

        # Explicit per-request auth wins over session-level credentials.
        if auth:
            headers = headers or {}
            headers['Authorization'] = _construct_basic_auth_str(auth[0], auth[1])
        elif self.auth_header:
            headers = headers or {}
            headers['Authorization'] = self.auth_header

        # send request, and catch any exceptions
        response = self._send_request_safe_mode(method, url, payload=data, headers=headers, **kwargs)

        # get the length of the content, but if the argument stream is set to True, we take
        # the size from the content-length header, in order to not trigger fetching of the body
        if stream:
            request_meta["content_size"] = int(response.headers.get("content-length") or 0)
        else:
            request_meta["content_size"] = len(response.content or "")

        # Record the consumed time
        # Note: This is intentionally placed after we record the content_size above, since
        # we'll then trigger fetching of the body (unless stream=True)
        request_meta["response_time"] = int((default_timer() - request_meta["start_time"]) * 1000)

        if catch_response:
            # Defer success/failure reporting to the caller via the context manager.
            response.locust_request_meta = request_meta
            return ResponseContextManager(response)
        else:
            try:
                response.raise_for_status()
            except FAILURE_EXCEPTIONS as e:
                events.request_failure.fire(
                    request_type=request_meta["method"],
                    name=request_meta["name"],
                    response_time=request_meta["response_time"],
                    exception=e,
                )
            else:
                events.request_success.fire(
                    request_type=request_meta["method"],
                    name=request_meta["name"],
                    response_time=request_meta["response_time"],
                    response_length=request_meta["content_size"],
                )
            return response

    def delete(self, path, **kwargs):
        """Sends a DELETE request"""
        return self.request("DELETE", path, **kwargs)

    def get(self, path, **kwargs):
        """Sends a GET request"""
        return self.request("GET", path, **kwargs)

    def head(self, path, **kwargs):
        """Sends a HEAD request"""
        return self.request("HEAD", path, **kwargs)

    def options(self, path, **kwargs):
        """Sends an OPTIONS request"""
        return self.request("OPTIONS", path, **kwargs)

    def patch(self, path, data=None, **kwargs):
        """Sends a PATCH request"""
        return self.request("PATCH", path, data=data, **kwargs)

    def post(self, path, data=None, **kwargs):
        """Sends a POST request"""
        return self.request("POST", path, data=data, **kwargs)

    def put(self, path, data=None, **kwargs):
        """Sends a PUT request"""
        return self.request("PUT", path, data=data, **kwargs)
class FastResponse(CompatResponse):
    """CompatResponse subclass that mimics parts of the requests.Response API."""

    headers = None
    """Dict like object containing the response headers"""

    # The underlying geventhttpclient response object (None when no valid
    # response was received, e.g. on connection errors).
    _response = None

    @property
    def text(self):
        """
        Returns the text content of the response as a decoded string
        (unicode on python2)
        """
        # Decode unicode from detected encoding.
        try:
            content = unicode(self.content, self.apparent_encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # Fallback to decode without specifying encoding
            content = unicode(self.content, errors='replace')
        return content

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library."""
        return chardet.detect(self.content)['encoding']

    def raise_for_status(self):
        """Raise any connection errors that occured during the request"""
        if hasattr(self, 'error') and self.error:
            raise self.error

    @property
    def status_code(self):
        """
        We override status_code in order to return None if no valid response was
        returned. E.g. in the case of connection errors
        """
        # NOTE(review): despite the docstring, the and/or chain yields 0 (not
        # None) when there is no response — confirm which value callers expect.
        return self._response is not None and self._response.get_code() or 0

    def _content(self):
        # No headers means no valid response was received at all.
        if self.headers is None:
            return None
        return super(FastResponse, self)._content()
class ErrorResponse(object):
    """
    This is used as a dummy response object when geventhttpclient raises an error
    that doesn't have a real Response object attached. E.g. a socket error or similar
    """

    # Mirror the FastResponse attribute surface with empty defaults.
    headers = None
    content = None
    status_code = 0
    error = None
    text = None

    def raise_for_status(self):
        # The stored error is always re-raised; callers attach it before use.
        raise self.error
class LocustUserAgent(UserAgent):
    """UserAgent subclass that wraps raw responses in FastResponse objects."""

    # Response class instantiated for every completed request.
    response_type = FastResponse

    def _urlopen(self, request):
        """Override _urlopen() in order to make it use the response_type attribute"""
        client = self.clientpool.get_client(request.url_split)
        resp = client.request(request.method, request.url_split.request_uri,
                              body=request.payload, headers=request.headers)
        return self.response_type(resp, request=request, sent_request=resp._sent_request)
class ResponseContextManager(FastResponse):
    """
    A Response class that also acts as a context manager that provides the ability to manually
    control if an HTTP request should be marked as successful or a failure in Locust's statistics

    This class is a subclass of :py:class:`FastResponse <locust.contrib.fasthttp.FastResponse>`
    with two additional methods: :py:meth:`success <locust.contrib.fasthttp.ResponseContextManager.success>`
    and :py:meth:`failure <locust.contrib.fasthttp.ResponseContextManager.failure>`.
    """

    # Set to True once success()/failure() has fired an event for this
    # response, so __exit__ does not report the same request twice.
    _is_reported = False

    def __init__(self, response):
        # copy data from response to this object
        self.__dict__ = response.__dict__
        # Force the body to be read now so later accesses do not re-fetch it.
        self._cached_content = response.content

    def __enter__(self):
        return self

    def __exit__(self, exc, value, traceback):
        if self._is_reported:
            # if the user has already manually marked this response as failure or success
            # we can ignore the default haviour of letting the response code determine the outcome
            return exc is None

        if exc:
            if isinstance(value, ResponseError):
                self.failure(value)
            else:
                # Unexpected exception types are not swallowed.
                return False
        else:
            try:
                self.raise_for_status()
            except FAILURE_EXCEPTIONS as e:
                self.failure(e)
            else:
                self.success()
        return True

    def success(self):
        """
        Report the response as successful

        Example::

            with self.client.get("/does/not/exist", catch_response=True) as response:
                if response.status_code == 404:
                    response.success()
        """
        events.request_success.fire(
            request_type=self.locust_request_meta["method"],
            name=self.locust_request_meta["name"],
            response_time=self.locust_request_meta["response_time"],
            response_length=self.locust_request_meta["content_size"],
        )
        self._is_reported = True

    def failure(self, exc):
        """
        Report the response as a failure.

        exc can be either a python exception, or a string in which case it will
        be wrapped inside a CatchResponseError.

        Example::

            with self.client.get("/", catch_response=True) as response:
                if response.content == "":
                    response.failure("No data")
        """
        if isinstance(exc, six.string_types):
            exc = CatchResponseError(exc)

        events.request_failure.fire(
            request_type=self.locust_request_meta["method"],
            name=self.locust_request_meta["name"],
            response_time=self.locust_request_meta["response_time"],
            exception=exc,
        )
        self._is_reported = True
| mit | ab852c1a8d712f54c38d481db8d68c2b | 39.708333 | 162 | 0.623721 | 4.494537 | false | false | false | false |
harrystech/arthur-redshift-etl | python/etl/util/timer.py | 1 | 1460 | """Timer class for when you need to measure the elapsed time in seconds."""
import datetime
def utc_now() -> datetime.datetime:
    """Return the current timezone-aware timestamp in UTC.

    Unlike ``datetime.utcnow()``, the returned value carries tzinfo.
    """
    return datetime.datetime.now(tz=datetime.timezone.utc)
def elapsed_seconds(start: datetime.datetime, end: datetime.datetime = None) -> float:
    """Return the number of seconds from *start* to *end* (default: now)."""
    finish = end or utc_now()
    delta = finish - start
    return delta.total_seconds()
class Timer:
    """
    Context manager class to measure elapsed (wall clock) time in seconds.

    >>> with Timer() as t:
    ...     pass
    ...
    >>> str(t)
    '0.00s'
    """

    def __init__(self):
        self._start = datetime.datetime.now(datetime.timezone.utc)
        self._end = None

    def __enter__(self):
        """Record the start time when the context is entered."""
        self._start = datetime.datetime.now(datetime.timezone.utc)
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Record the end time when the context is left."""
        self._end = datetime.datetime.now(datetime.timezone.utc)

    def __str__(self):
        # Breaking with tradition, the timer instances return the elapsed time,
        # not a description of the instance.
        return "%.2fs" % self.elapsed

    @property
    def elapsed(self):
        """Elapsed wall-clock seconds between start and end (or now)."""
        end = self._end or datetime.datetime.now(datetime.timezone.utc)
        return (end - self._start).total_seconds()
| mit | 9363ae818bf94b253ae589700c1e08fd | 27.076923 | 97 | 0.613014 | 4.147727 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/cmus.py | 1 | 5688 | # pylint: disable=C0111,R0903
"""Displays information about the current song in cmus.
Requires the following executable:
* cmus-remote
Parameters:
* cmus.format: Format string for the song information. Tag values can be put in curly brackets (i.e. {artist})
Additional tags:
* {file} - full song file name
* {file1} - song file name without path prefix
if {file} = '/foo/bar.baz', then {file1} = 'bar.baz'
* {file2} - song file name without path prefix and extension suffix
if {file} = '/foo/bar.baz', then {file2} = 'bar'
* cmus.layout: Space-separated list of widgets to add. Possible widgets are the buttons/toggles cmus.prev, cmus.next, cmus.shuffle and cmus.repeat, and the main display with play/pause function cmus.main.
* cmus.server: The address of the cmus server, either a UNIX socket or host[:port]. Connects to the local instance by default.
* cmus.passwd: The password to use for the TCP/IP connection.
contributed by `TheEdgeOfRage <https://github.com/TheEdgeOfRage>`_ - many thanks!
"""
from collections import defaultdict
import os
import string
import core.module
import core.input
import core.decorators
import util.cli
import util.format
class Module(core.module.Module):
def __init__(self, config, theme):
super().__init__(config, theme, [])
self._layout = self.parameter(
"layout", "cmus.prev cmus.main cmus.next cmus.shuffle cmus.repeat"
)
self._fmt = self.parameter("format", "{artist} - {title} {position}/{duration}")
self._server = self.parameter("server", None)
self._passwd = self.parameter("passwd", None)
self._status = None
self._shuffle = False
self._repeat = False
self._tags = defaultdict(lambda: "")
# Create widgets
widget_map = {}
for widget_name in self._layout.split():
widget = self.add_widget(name=widget_name)
self._cmd = "cmus-remote"
if self._server is not None:
self._cmd = "{cmd} --server {server}".format(
cmd=self._cmd, server=self._server
)
if self._passwd is not None:
self._cmd = "{cmd} --passwd {passwd}".format(
cmd=self._cmd, passwd=self._passwd
)
if widget_name == "cmus.prev":
widget_map[widget] = {
"button": core.input.LEFT_MOUSE,
"cmd": "{cmd} -r".format(cmd=self._cmd),
}
elif widget_name == "cmus.main":
widget_map[widget] = {
"button": core.input.LEFT_MOUSE,
"cmd": "{cmd} -u".format(cmd=self._cmd),
}
widget.full_text(self.description)
elif widget_name == "cmus.next":
widget_map[widget] = {
"button": core.input.LEFT_MOUSE,
"cmd": "{cmd} -n".format(cmd=self._cmd),
}
elif widget_name == "cmus.shuffle":
widget_map[widget] = {
"button": core.input.LEFT_MOUSE,
"cmd": "{cmd} -S".format(cmd=self._cmd),
}
elif widget_name == "cmus.repeat":
widget_map[widget] = {
"button": core.input.LEFT_MOUSE,
"cmd": "{cmd} -R".format(cmd=self._cmd),
}
else:
raise KeyError(
"The cmus module does not support a {widget_name!r} widget".format(
widget_name=widget_name
)
)
# Register input callbacks
for widget, callback_options in widget_map.items():
core.input.register(widget, **callback_options)
def hidden(self):
return self._status is None
@core.decorators.scrollable
def description(self, widget):
return string.Formatter().vformat(self._fmt, (), self._tags)
def update(self):
self._load_song()
def state(self, widget):
returns = {
"cmus.shuffle": "shuffle-on" if self._shuffle else "shuffle-off",
"cmus.repeat": "repeat-on" if self._repeat else "repeat-off",
"cmus.prev": "prev",
"cmus.next": "next",
}
return returns.get(widget.name, self._status)
def _eval_line(self, line):
if line.startswith("file "):
full_file = line[5:]
file1 = os.path.basename(full_file)
file2 = os.path.splitext(file1)[0]
self._tags.update({"file": full_file})
self._tags.update({"file1": file1})
self._tags.update({"file2": file2})
return
name, key, value = (line.split(" ", 2) + [None, None])[:3]
if name == "status":
self._status = key
if name == "tag":
self._tags.update({key: value})
if name in ["duration", "position"]:
self._tags.update({name: util.format.duration(int(key))})
if name == "set" and key == "repeat":
self._repeat = value == "true"
if name == "set" and key == "shuffle":
self._shuffle = value == "true"
    def _load_song(self):
        """Query cmus and re-parse its status output.

        On failure (cmus not running) the status is cleared; the tag cache is
        always reset so stale song information does not linger.
        """
        info = ""
        try:
            info = util.cli.execute("{cmd} -Q".format(cmd=self._cmd))
        except RuntimeError:
            self._status = None
        # defaultdict keeps the format string working for missing tags
        self._tags = defaultdict(lambda: "")
        for line in info.split("\n"):
            self._eval_line(line)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 6229378fb636b4dce1afe42fb102addb | 35.461538 | 208 | 0.534986 | 3.966527 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/pihole.py | 1 | 2850 | # pylint: disable=C0111,R0903
"""Displays the pi-hole status (up/down) together with the number of ads that were blocked today
Parameters:
* pihole.address : pi-hole address (e.q: http://192.168.1.3)
* pihole.pwhash : pi-hole webinterface password hash (can be obtained from the /etc/pihole/SetupVars.conf file)
contributed by `bbernhard <https://github.com/bbernhard>`_ - many thanks!
"""
import requests
import core.module
import core.widget
import core.input
class Module(core.module.Module):
    """Shows pi-hole up/down status plus the number of ads blocked today.

    Left click toggles blocking via the pi-hole admin API.
    """

    # NOTE(review): core.decorators is used here but not imported at the top
    # of this file; it resolves only because another module imports it first -
    # confirm and add `import core.decorators` to the file header.
    @core.decorators.every(minutes=1)
    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.pihole_status))
        self._pihole_address = self.parameter("address", "")
        self._pihole_pw_hash = self.parameter("pwhash", "")
        # None = unknown/unreachable, True = enabled, False = disabled
        self._pihole_status = None
        self._ads_blocked_today = "-"
        self.update_pihole_status()
        core.input.register(
            self, button=core.input.LEFT_MOUSE, cmd=self.toggle_pihole_status
        )

    def pihole_status(self, widget):
        """Widget text: 'unknown', 'down' or 'up <n> blocked'."""
        if self._pihole_status is None:
            return "pi-hole unknown"
        if self._pihole_status:
            return "pi-hole up {} blocked".format(self._ads_blocked_today)
        return "pi-hole down"

    def update_pihole_status(self):
        """Query the summary API; any failure marks the status unknown."""
        try:
            # timeout added so an unreachable pi-hole cannot hang the bar
            data = requests.get(
                self._pihole_address + "/admin/api.php?summary", timeout=10
            ).json()
            self._pihole_status = data["status"] == "enabled"
            self._ads_blocked_today = data["ads_blocked_today"]
        except Exception:
            # network failure, bad JSON or unexpected payload
            self._pihole_status = None

    def toggle_pihole_status(self, widget):
        """Left-click callback: flip blocking on/off via the admin API."""
        if self._pihole_status is None:
            return
        action = "disable" if self._pihole_status else "enable"
        try:
            req = requests.get(
                "{}/admin/api.php?{}&auth={}".format(
                    self._pihole_address, action, self._pihole_pw_hash
                ),
                timeout=10,
            )
            if req.status_code == 200:
                # anything other than 'disabled' counts as enabled
                self._pihole_status = req.json()["status"] != "disabled"
        except Exception:
            # best effort: keep the previous state on failure
            # (was a bare `except:` which also swallowed KeyboardInterrupt)
            pass

    def update(self):
        self.update_pihole_status()

    def state(self, widget):
        """Theme state derived from the current pi-hole status."""
        if self._pihole_status is None:
            return []
        elif self._pihole_status:
            return ["enabled"]
        return ["disabled", "warning"]
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 4449333349ae6bca57d16a080a239236 | 32.529412 | 120 | 0.543158 | 4.014085 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | tests/modules/contrib/test_network_traffic.py | 1 | 2991 | import pytest
from unittest import TestCase, mock
import core.config
import core.widget
import modules.contrib.network_traffic
from types import SimpleNamespace
pytest.importorskip("psutil")
pytest.importorskip("netifaces")
def io_counters_mock(recv, sent):
    """Fake psutil.net_io_counters(pernic=True) result for interface 'lo'."""
    counters = SimpleNamespace(bytes_sent=sent, bytes_recv=recv)
    return {"lo": counters}
def gateways_response():
    """Fake netifaces.gateways() payload: default IPv4 gateway on 'lo'."""
    default_gateway = ("10.0.0.10", "lo")
    return {"default": {1: default_gateway}}
def build_module():
    """Construct a network_traffic module backed by an empty configuration."""
    empty_config = core.config.Config([])
    return modules.contrib.network_traffic.Module(config=empty_config, theme=None)
def download_widget(module):
    """The module's first widget carries the download (rx) rate."""
    widgets = module.widgets()
    return widgets[0]
def upload_widget(module):
    """The module's second widget carries the upload (tx) rate."""
    widgets = module.widgets()
    return widgets[1]
def mb_to_bytes(value):
    """Convert mebibytes to bytes."""
    return value * 1024 * 1024
class TestNetworkTrafficUnit(TestCase):
    """Unit tests for the network_traffic module (psutil/netifaces mocked)."""

    def test_load_module(self):
        # smoke test: the module must at least be importable
        __import__("modules.contrib.network_traffic")

    def test_initial_download_rate(self):
        module = build_module()
        assert download_widget(module).full_text() == '0.00B/s'

    def test_initial_upload_rate(self):
        module = build_module()
        assert upload_widget(module).full_text() == '0.00B/s'

    @mock.patch('netifaces.gateways')
    def test_invalid_gateways(self, gateways_mock):
        # without a usable default gateway the module must report zero rates
        gateways_mock.return_value = { 'invalid': 'gateways' }
        module = build_module()
        assert download_widget(module).full_text() == '0.00B/s'
        assert upload_widget(module).full_text() == '0.00B/s'

    @mock.patch('psutil.net_io_counters')
    def test_invalid_io_counters(self, net_io_counters_mock):
        # malformed counter data must not crash the module
        net_io_counters_mock.return_value = { 'invalid': 'io_counters' }
        module = build_module()
        assert download_widget(module).full_text() == '0.00B/s'
        assert upload_widget(module).full_text() == '0.00B/s'

    # mock.patch decorators are applied bottom-up: gateways_mock receives the
    # netifaces.gateways patch, net_io_counters_mock the psutil patch;
    # AF_INET is patched with a plain value and injects no argument.
    @mock.patch('psutil.net_io_counters')
    @mock.patch('netifaces.gateways')
    @mock.patch('netifaces.AF_INET', 1)
    def test_update_rates(self, gateways_mock, net_io_counters_mock):
        net_io_counters_mock.return_value = io_counters_mock(0, 0)
        gateways_mock.return_value = gateways_response()
        module = build_module()
        assert download_widget(module).full_text() == '0.00B/s'
        assert upload_widget(module).full_text() == '0.00B/s'
        net_io_counters_mock.return_value = io_counters_mock(
            mb_to_bytes(30),
            mb_to_bytes(0.5)
        )
        module.update()
        # rates are deltas between updates, formatted with binary prefixes
        assert download_widget(module).full_text() == '30.00MiB/s'
        assert upload_widget(module).full_text() == '512.00KiB/s'

    def test_widget_states(self):
        module = build_module()
        assert module.state(download_widget(module)) == 'rx'
        assert module.state(upload_widget(module)) == 'tx'

    def test_invalid_widget_state(self):
        module = build_module()
        invalid_widget = core.widget.Widget(name='invalid')
        assert module.state(invalid_widget) == None
| mit | e1485a0e78eeaa471258e46be3fdaa3c | 28.038835 | 76 | 0.629555 | 3.422197 | false | true | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/github.py | 1 | 3373 | # pylint: disable=C0111,R0903
"""
Displays the unread GitHub notifications count for a GitHub user using the following reasons:
* https://developer.github.com/v3/activity/notifications/#notification-reasons
Uses `xdg-open` or `x-www-browser` to open web-pages.
Requires the following library:
* requests
Parameters:
* github.token: GitHub user access token, the token needs to have the 'notifications' scope.
* github.interval: Interval in minutes between updates, default is 5.
* github.reasons: Comma separated reasons to be parsed (e.g.: github.reasons=mention,team_mention,review_requested)
contributed by:
* v1 - `yvesh <https://github.com/yvesh>`_ - many thanks!
* v2 - `cristianmiranda <https://github.com/cristianmiranda>`_ - many thanks!
"""
import shutil
import requests
import core.module
import core.widget
import core.decorators
import core.input
import util.format
class Module(core.module.Module):
    """Shows the number of unread GitHub notifications.

    Optionally appends per-reason counts ('mention', 'review_requested', ...)
    and opens the notifications page on left click.
    """

    @core.decorators.every(minutes=5)
    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.github))
        self.background = True
        self.__count = 0
        self.__label = ""
        self.__requests = requests.Session()
        self.__requests.headers.update(
            {"Authorization": "token {}".format(self.parameter("token", ""))}
        )
        self.__reasons = []
        reasons = self.parameter("reasons", "")
        if reasons:
            self.__reasons = util.format.aslist(reasons)
        cmd = "xdg-open"
        if not shutil.which(cmd):
            cmd = "x-www-browser"
        core.input.register(
            self,
            button=core.input.LEFT_MOUSE,
            cmd="{} https://github.com/notifications".format(cmd),
        )

    def github(self, _):
        """Widget text: total count, optionally '<total> - <a>/<b>/...'."""
        return str(self.__label)

    def update(self):
        """Fetch notifications and rebuild the label.

        The response is parsed once; the previous code re-parsed the JSON
        body for every configured reason.
        """
        try:
            url = "https://api.github.com/notifications"
            notifications = self.__requests.get(url).json()
            total = self.__getTotalUnreadNotificationsCount(notifications)
            self.__count = total
            self.__label = str(total)
            if self.__reasons:
                counts = [
                    str(
                        self.__getUnreadNotificationsCountByReason(
                            notifications, reason
                        )
                    )
                    for reason in self.__reasons
                ]
                self.__label += " - " + "/".join(counts)
        except Exception:
            self.__label = "n/a"

    def __getUnreadNotificationsCountByReason(self, notifications, reason):
        # notifications is the parsed JSON list of notification objects
        return sum(
            1
            for notification in notifications
            if notification["unread"] and notification["reason"] == reason
        )

    def __getTotalUnreadNotificationsCount(self, notifications):
        return sum(1 for notification in notifications if notification["unread"])

    def state(self, widget):
        """Warn while there are unread notifications."""
        state = []
        if self.__count > 0:
            state.append("warning")
        return state
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | c1cecdb80b794bd1c3412c6b8b43853b | 27.82906 | 119 | 0.569226 | 4.313299 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/core/scroll.py | 1 | 1626 | # pylint: disable=C0111,R0903
"""Displays two widgets that can be used to scroll the whole status bar
Parameters:
* scroll.width: Width (in number of widgets) to display
"""
import core.module
import core.widget
import core.input
import core.event
import util.format
class Module(core.module.Module):
    """Two arrow widgets ('<' and '>') that scroll the whole status bar."""

    def __init__(self, config, theme):
        super().__init__(config, theme, [])
        self.__offset = 0  # index of the leftmost visible widget
        self.__widgetcount = 0  # total number of scrollable widgets
        w = self.add_widget(full_text = "<")
        core.input.register(w, button=core.input.LEFT_MOUSE, cmd=self.scroll_left)
        w = self.add_widget(full_text = ">")
        core.input.register(w, button=core.input.LEFT_MOUSE, cmd=self.scroll_right)
        self.__width = util.format.asint(self.parameter("width"))
        # publish the visible width so the output engine clips the bar
        config.set("output.width", self.__width)
        core.event.register("output.done", self.update_done)

    def scroll_left(self, _):
        # only scroll when not already at the left edge
        if self.__offset > 0:
            core.event.trigger("output.scroll-left")

    def scroll_right(self, _):
        # only scroll while there are widgets beyond the right edge
        if self.__offset + self.__width < self.__widgetcount:
            core.event.trigger("output.scroll-right")

    def update_done(self, offset, widgetcount):
        """Callback from the output engine with the current scroll position."""
        self.__offset = offset
        self.__widgetcount = widgetcount

    def scroll(self):
        # the arrow widgets themselves must never be scrolled out of view
        return False

    def state(self, widget):
        # first widget is '<': warn when scrolling left is impossible;
        # the elif handles '>': warn when scrolling right is impossible
        if widget.id == self.widgets()[0].id:
            if self.__offset == 0:
                return ["warning"]
        elif self.__offset + self.__width >= self.__widgetcount:
            return ["warning"]
        return []
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 9f0cf815e08d44dd81a2716e928c0dda | 29.679245 | 83 | 0.613161 | 3.772622 | false | true | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/mocp.py | 1 | 1800 | # pylint: disable=C0111,R0903
# -*- coding: utf-8 -*-
"""Displays information about the current song in mocp. Left click toggles play/pause. Right click toggles shuffle.
Requires the following executable:
* mocp
Parameters:
* mocp.format: Format string for the song information. Replace string sequences with the actual information:
* %state State
* %file File
* %title Title, includes track, artist, song title and album
* %artist Artist
* %song SongTitle
* %album Album
* %tt TotalTime
* %tl TimeLeft
* %ts TotalSec
* %ct CurrentTime
* %cs CurrentSec
* %b Bitrate
* %r Sample rate
contributed by `chrugi <https://github.com/chrugi>`_ - many thanks!
"""
import core.module
import core.widget
import core.input
import util.cli
class Module(core.module.Module):
    """Shows the current mocp song; left click play/pause, right click shuffle."""

    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.description))
        core.input.register(self, button=core.input.LEFT_MOUSE, cmd="mocp -G")
        core.input.register(self, button=core.input.RIGHT_MOUSE, cmd="mocp -t shuffle")
        self.__format = self.parameter("format", "%state %artist - %song | %ct/%tt")
        self.__running = False
        # fix: define __info up front so description() can never hit an
        # undefined attribute, regardless of call order
        self.__info = ""

    def description(self, widget):
        """Widget text: song info while mocp runs, a placeholder otherwise."""
        # idiom fix: truth-test the flag instead of comparing `== True`
        return self.__info if self.__running else "Music On Console Player"

    def update(self):
        self.__load_song()

    def __load_song(self):
        """Query mocp; a RuntimeError means the player is not running."""
        try:
            self.__info = util.cli.execute("mocp -Q '{}'".format(self.__format)).strip()
            self.__running = True
        except RuntimeError:
            self.__running = False
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 5944fe8fd7a4611237c82bfa60cee728 | 29.508475 | 115 | 0.601111 | 3.837953 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/solaar.py | 1 | 1592 | """Shows status and load percentage of logitech's unifying device
Requires the following executable:
* solaar (from community)
contributed by `cambid <https://github.com/cambid>`_ - many thanks!
"""
import logging
import core.module
import core.widget
import core.decorators
import util.cli
class Module(core.module.Module):
    """Shows the battery status of a Logitech unifying device via solaar."""

    @core.decorators.every(seconds=30)
    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.utilization))
        self.__battery = self.parameter("device", "")
        self.background = True
        self.__battery_status = ""
        self.__error = False
        # restrict the query to a single device when one is configured
        if self.__battery != "":
            self.__cmd = f"solaar show '{self.__battery}'"
        else:
            self.__cmd = "solaar show"

    @property
    def __format(self):
        return self.parameter("format", "{}")

    def utilization(self, widget):
        """Widget text: the battery status formatted per configuration."""
        template = self.__format
        return template.format(self.__battery_status)

    def update(self):
        """Run solaar and pull the battery line out of its output."""
        self.__error = False
        code, result = util.cli.execute(
            self.__cmd, ignore_errors=True, return_exitcode=True
        )
        if code != 0:
            self.__error = True
            logging.error(f"solaar exited with {code}: {result}")
            return
        # the last matching line wins, same as the previous implementation
        for line in result.split('\n'):
            if "Battery" in line:
                self.__battery_status = line.split(':')[1].strip()

    def state(self, widget):
        return "warning" if self.__error else "okay"
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 96cd108a3a67ca711fd9c63cab624b95 | 26.448276 | 77 | 0.584171 | 3.930864 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/cpu2.py | 1 | 5735 | """Multiwidget CPU module
Can display any combination of:
* max CPU frequency
* total CPU load in percents (integer value)
* per-core CPU load as graph - either mono or colored
* CPU temperature (in Celsius degrees)
* CPU fan speed
Requirements:
* the psutil Python module for the first three items from the list above
* sensors executable for the rest
Parameters:
* cpu2.layout: Space-separated list of widgets to add.
Possible widgets are:
* cpu2.maxfreq
* cpu2.cpuload
* cpu2.coresload
* cpu2.temp
* cpu2.fanspeed
* cpu2.colored: 1 for colored per core load graph, 0 for mono (default)
* cpu2.temp_pattern: pattern to look for in the output of 'sensors -u';
required if cpu2.temp widget is used
* cpu2.fan_pattern: pattern to look for in the output of 'sensors -u';
required if cpu2.fanspeed widget is used
Note: if you are getting 'n/a' for CPU temperature / fan speed, then you're
lacking the aforementioned pattern settings or they have wrong values.
contributed by `somospocos <https://github.com/somospocos>`_ - many thanks!
"""
import psutil
import core.module
import util.cli
import util.graph
import util.format
class Module(core.module.Module):
    """Multi-widget CPU module: max frequency, total load, per-core load
    graph, temperature and fan speed (see the module docstring for the
    supported layout widgets and parameters)."""

    def __init__(self, config, theme):
        super().__init__(config, theme, [])
        self.__layout = self.parameter(
            "layout", "cpu2.maxfreq cpu2.cpuload cpu2.coresload cpu2.temp cpu2.fanspeed"
        )
        self.__widget_names = self.__layout.split()
        self.__colored = util.format.asbool(self.parameter("colored", False))
        # defensive defaults so the widget callbacks work even if they run
        # before the first update() has populated the real values
        self.__maxfreq = 0.0
        self.__cpuload = 0
        self.__coresload = []
        self.__temp = "n/a"
        self.__fanspeed = "n/a"
        for widget_name in self.__widget_names:
            if widget_name == "cpu2.maxfreq":
                widget = self.add_widget(name=widget_name, full_text=self.maxfreq)
                widget.set("type", "freq")
            elif widget_name == "cpu2.cpuload":
                widget = self.add_widget(name=widget_name, full_text=self.cpuload)
                widget.set("type", "load")
            elif widget_name == "cpu2.coresload":
                widget = self.add_widget(name=widget_name, full_text=self.coresload)
                widget.set("type", "loads")
            elif widget_name == "cpu2.temp":
                widget = self.add_widget(name=widget_name, full_text=self.temp)
                widget.set("type", "temp")
            elif widget_name == "cpu2.fanspeed":
                widget = self.add_widget(name=widget_name, full_text=self.fanspeed)
                widget.set("type", "fan")
            if self.__colored:
                widget.set("pango", True)
        self.__temp_pattern = self.parameter("temp_pattern")
        if self.__temp_pattern is None:
            self.__temp = "n/a"
        self.__fan_pattern = self.parameter("fan_pattern")
        if self.__fan_pattern is None:
            # fix: was `self.__fan`, a typo - fanspeed() reads __fanspeed
            self.__fanspeed = "n/a"
        # maxfreq is loaded only once at startup
        if "cpu2.maxfreq" in self.__widget_names:
            self.__maxfreq = psutil.cpu_freq().max / 1000

    def maxfreq(self, _):
        """Widget text: maximum CPU frequency in GHz."""
        return "{:.2f}GHz".format(self.__maxfreq)

    def cpuload(self, _):
        """Widget text: total CPU load in percent."""
        return "{:>3}%".format(self.__cpuload)

    def add_color(self, bar):
        """add color as pango markup to a bar"""
        if bar in ["▁", "▂"]:
            color = self.theme.color("green", "green")
        elif bar in ["▃", "▄"]:
            color = self.theme.color("yellow", "yellow")
        elif bar in ["▅", "▆"]:
            color = self.theme.color("orange", "orange")
        elif bar in ["▇", "█"]:
            color = self.theme.color("red", "red")
        else:
            # fix: unknown characters used to raise UnboundLocalError on
            # 'color' below; return them uncolored instead
            return bar
        colored_bar = '<span foreground="{}">{}</span>'.format(color, bar)
        return colored_bar

    def coresload(self, _):
        """Widget text: one bar character per core, optionally colorized."""
        mono_bars = [util.graph.hbar(x) for x in self.__coresload]
        if not self.__colored:
            return "".join(mono_bars)
        colored_bars = [self.add_color(x) for x in mono_bars]
        return "".join(colored_bars)

    def temp(self, _):
        """Widget text: CPU temperature; 'n/a' when unknown or zero."""
        if self.__temp == "n/a" or self.__temp == 0:
            return "n/a"
        return "{}°C".format(self.__temp)

    def fanspeed(self, _):
        """Widget text: fan speed in RPM; 'n/a' when unknown."""
        if self.__fanspeed == "n/a":
            return "n/a"
        return "{}RPM".format(self.__fanspeed)

    def _parse_sensors_output(self):
        """Scan `sensors -u` output for the configured temp/fan patterns.

        Returns a (temperature, fan_speed) tuple; either may be 'n/a'.
        """
        output = util.cli.execute("sensors -u")
        lines = output.split("\n")
        temp = "n/a"
        fan = "n/a"
        temp_line = None
        fan_line = None
        for line in lines:
            if self.__temp_pattern is not None and self.__temp_pattern in line:
                temp_line = line
            if self.__fan_pattern is not None and self.__fan_pattern in line:
                fan_line = line
            if temp_line is not None and fan_line is not None:
                break
        if temp_line is not None:
            temp = round(float(temp_line.split(":")[1].strip()))
        if fan_line is not None:
            # value looks like '1234.000' - drop the fractional part
            fan = int(fan_line.split(":")[1].strip()[:-4])
        return temp, fan

    def update(self):
        """Refresh only the values backing the configured widgets."""
        if "cpu2.maxfreq" in self.__widget_names:
            self.__maxfreq = psutil.cpu_freq().max / 1000
        if "cpu2.cpuload" in self.__widget_names:
            self.__cpuload = round(psutil.cpu_percent(percpu=False))
        if "cpu2.coresload" in self.__widget_names:
            self.__coresload = psutil.cpu_percent(percpu=True)
        if "cpu2.temp" in self.__widget_names or "cpu2.fanspeed" in self.__widget_names:
            self.__temp, self.__fanspeed = self._parse_sensors_output()

    def state(self, widget):
        """for having per-widget icons"""
        return [widget.get("type", "")]
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | baf2f8e117a210c7de0c68c98993860f | 36.12987 | 88 | 0.573977 | 3.628173 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/core/output.py | 1 | 10278 | import sys
import json
import time
import threading
import core.theme
import core.event
import util.format
def dump_json(obj):
    """json.dumps default hook: blocks serialize via their own dict()."""
    serialize = obj.dict
    return serialize()
def assign(src, dst, key, src_key=None, default=None):
    """Copy the first matching source entry into dst[key].

    When src_key is omitted it is derived from key: names starting with an
    underscore are looked up verbatim, all others in dashed form
    ('full_text' -> 'full-text'). src_key may also be a list of candidates;
    the first one present in src wins. If nothing matches and default is not
    None, the default is stored instead.
    """
    if not src_key:
        src_key = key if key.startswith("_") else key.replace("_", "-")
    candidates = src_key if isinstance(src_key, list) else [src_key]
    for candidate in candidates:
        if candidate in src:
            dst[key] = src[candidate]
            return
    if default is not None:
        dst[key] = default
class block(object):
    """A single i3bar JSON block assembled from a widget and its theme."""

    # theme attributes copied verbatim for every widget
    __COMMON_THEME_FIELDS = [
        "separator",
        "separator-block-width",
        "default-separators",
        "border-top",
        "border-left",
        "border-right",
        "border-bottom",
        "fg",
        "bg",
        "padding",
        "prefix",
        "suffix",
    ]

    def __init__(self, theme, module, widget):
        """Snapshot the theme attributes for this module/widget pair."""
        self.__attributes = {}
        for key in self.__COMMON_THEME_FIELDS:
            tmp = theme.get(key, widget)
            if tmp is not None:
                self.__attributes[key] = tmp
        self.__attributes["name"] = module.id
        self.__attributes["instance"] = widget.id
        # background of the previous block, needed to draw separators
        self.__attributes["prev-bg"] = theme.get("bg", "previous")

    def set(self, key, value):
        """Set a single block attribute."""
        self.__attributes[key] = value

    def get(self, key, default=None):
        """Return a block attribute, or default when unset."""
        return self.__attributes.get(key, default)

    def is_pango(self, attr):
        """True if attr is a pango markup descriptor ({'pango': {...}})."""
        if isinstance(attr, dict) and "pango" in attr:
            return True
        return False

    def pangoize(self, text):
        """Convert a pango descriptor into a <span> string (no-op otherwise).

        Side effect: switches the block's markup mode to 'pango'.
        """
        if not self.is_pango(text):
            return text
        self.__attributes["markup"] = "pango"
        attr = dict(text["pango"])
        text = attr.get("full_text", "")
        if "full_text" in attr:
            del attr["full_text"]
        result = "<span"
        for key, value in attr.items():
            result = '{} {}="{}"'.format(result, key, value)
        result = "{}>{}</span>".format(result, text)
        return result

    def dict(self):
        """Serialize this block into an i3bar-protocol dict."""
        result = {}
        assign(self.__attributes, result, "full_text", ["full_text", "separator"])
        assign(self.__attributes, result, "separator", "default-separators")
        if "_decorator" in self.__attributes:
            # decorator (separator) blocks: fg is this block's background,
            # background is the previous block's background
            assign(self.__attributes, result, "color", "bg")
            assign(self.__attributes, result, "background", "prev-bg")
            result["_decorator"] = True
        else:
            assign(self.__attributes, result, "color", "fg")
            assign(self.__attributes, result, "background", "bg")
        if "full_text" in self.__attributes:
            prefix = self.__pad(self.pangoize(self.__attributes.get("prefix")))
            suffix = self.__pad(self.pangoize(self.__attributes.get("suffix")))
            self.set("_prefix", prefix)
            self.set("_suffix", suffix)
            self.set("_raw", self.get("full_text"))
            result["full_text"] = self.pangoize(result["full_text"])
            # bug fix: format the pangoized result instead of the raw
            # attribute - the old code discarded the conversion above and
            # stringified pango dicts into their repr
            result["full_text"] = self.__format(result["full_text"])
        if "min-width" in self.__attributes and "padding" in self.__attributes:
            self.set("min-width", self.__format(self.get("min-width")))
        for k in [
            "name",
            "instance",
            "separator_block_width",
            "border",
            "border_top",
            "border_bottom",
            "border_left",
            "border_right",
            "markup",
            "_raw",
            "_suffix",
            "_prefix",
            "min_width",
            "align",
        ]:
            assign(self.__attributes, result, k)
        return result

    def __pad(self, text):
        """Surround text with the theme padding (padding alone when empty)."""
        padding = self.__attributes.get("padding", "")
        if not text:
            return padding
        return "{}{}{}".format(padding, text, padding)

    def __format(self, text):
        """Wrap text with the previously computed prefix and suffix."""
        if text is None:
            return None
        prefix = self.get("_prefix")
        suffix = self.get("_suffix")
        return "{}{}{}".format(prefix, text, suffix)
class i3(object):
    """Renders modules/widgets as i3bar protocol JSON on stdout.

    Subscribes to the core event bus for updates, drawing and scrolling.
    """

    # NOTE(review): core.config is referenced in the default argument below
    # but is not imported in this file; it resolves only if another module
    # imports it first - confirm. Both defaults are evaluated once at import
    # time and shared by every i3() created without explicit arguments.
    def __init__(self, theme=core.theme.Theme(), config=core.config.Config([])):
        self.__modules = []
        self.__content = {}  # widget id -> {"text": ..., "minimized": bool}
        self.__theme = theme
        self.__config = config
        self.__offset = 0  # scroll offset (leftmost visible widget)
        self.__lock = threading.Lock()  # serializes update() vs draw()
        core.event.register("update", self.update)
        core.event.register("start", self.draw, "start")
        core.event.register("draw", self.draw, "statusline")
        core.event.register("stop", self.draw, "stop")
        core.event.register("output.scroll-left", self.scroll_left)
        core.event.register("output.scroll-right", self.scroll_right)

    def content(self):
        # raw widget content cache
        return self.__content

    def theme(self, new_theme=None):
        """Get, or optionally replace, the active theme."""
        if new_theme:
            self.__theme = new_theme
        return self.__theme

    def modules(self, modules=None):
        """Get the module list, or replace it (single module is wrapped)."""
        if not modules:
            return self.__modules
        self.__modules = modules if isinstance(modules, list) else [modules]

    def toggle_minimize(self, event):
        """Toggle minimized state for the widget named in a click event."""
        widget_id = event["instance"]
        for module in self.__modules:
            if module.widget(widget_id=widget_id) and util.format.asbool(module.parameter("minimize", False)) == True:
                # this module can customly minimize
                module.minimized = not module.minimized
                return
        if widget_id in self.__content:
            self.__content[widget_id]["minimized"] = not self.__content[widget_id]["minimized"]

    def draw(self, what, args=None):
        """Invoke the named producer (start/statusline/stop) and emit JSON."""
        with self.__lock:
            cb = getattr(self, what)
            data = cb(args) if args else cb()
            if "blocks" in data:
                sys.stdout.write(json.dumps(data["blocks"], default=dump_json))
            if "suffix" in data:
                sys.stdout.write(data["suffix"])
            sys.stdout.write("\n")
            sys.stdout.flush()

    def start(self):
        # i3bar protocol header, followed by the opening of the infinite array
        return {
            "blocks": {"version": 1, "click_events": True},
            "suffix": "\n[",
        }

    def stop(self):
        # close the infinite array
        return {"suffix": "\n]"}

    def separator_block(self, module, widget):
        """Build the decorator block drawn before a widget (may be empty)."""
        if not self.__theme.get("separator"):
            return []
        blk = block(self.__theme, module, widget)
        blk.set("_decorator", True)
        return [blk]

    def __content_block(self, module, widget):
        """Build the JSON block carrying a widget's cached text."""
        blk = block(self.__theme, module, widget)
        minwidth = widget.theme("minwidth")
        if minwidth is not None:
            try:
                blk.set("min-width", "-" * int(minwidth))
            except:
                # NOTE(review): bare except - non-numeric minwidth is used
                # verbatim, but this also swallows unrelated errors
                blk.set("min-width", minwidth)
        blk.set("align", widget.theme("align"))
        blk.set("full_text", "\u2026" if self.__content[widget.id]["minimized"] else self.__content[widget.id]["text"])
        if widget.get("pango", False):
            blk.set("markup", "pango")
        if self.__config.debug():
            state = module.state(widget)
            if isinstance(state, list):
                state = ", ".join(state)
            blk.set("__state", state)
        return blk

    def scroll_left(self):
        # clamp at the left edge
        if self.__offset > 0:
            self.__offset -= 1

    def scroll_right(self):
        self.__offset += 1

    def blocks(self, module):
        """Build all JSON blocks for one module, honoring scroll/hide rules."""
        blocks = []
        if module.minimized:
            # minimized modules render only their first widget
            blocks.extend(self.separator_block(module, module.widgets()[0]))
            blocks.append(self.__content_block(module, module.widgets()[0]))
            self.__widgetcount += 1
            return blocks
        width = self.__config.get("output.width", 0)
        for widget in module.widgets():
            if module.scroll() == True and width > 0:
                # skip widgets outside the visible scroll window
                self.__widgetcount += 1
                if self.__widgetcount-1 < self.__offset:
                    continue
                if self.__widgetcount-1 >= self.__offset + width:
                    continue
            if widget.module and self.__config.autohide(widget.module.name):
                if not any(
                    state in widget.state() for state in ["warning", "critical", "no-autohide"]
                ):
                    continue
            if module.hidden():
                continue
            if widget.hidden:
                continue
            if "critical" in widget.state() and self.__config.errorhide(widget.module.name):
                continue
            blocks.extend(self.separator_block(module, widget))
            blocks.append(self.__content_block(module, widget))
            core.event.trigger("next-widget")
        # inform listeners (e.g. the scroll module) of the final geometry
        core.event.trigger("output.done", self.__offset, self.__widgetcount)
        return blocks

    def update(self, affected_modules=None, redraw_only=False, force=False):
        # thread-safe wrapper around update2()
        with self.__lock:
            self.update2(affected_modules, redraw_only, force)

    def update2(self, affected_modules=None, redraw_only=False, force=False):
        """Refresh due modules (or the given ones) and cache widget texts."""
        now = time.time()
        for module in self.__modules:
            if affected_modules and not module.id in affected_modules:
                continue
            if not affected_modules and module.next_update:
                # skip modules whose interval has not elapsed yet
                if now < module.next_update and not force:
                    continue
            if not redraw_only:
                module.update_wrapper()
                if module.parameter("interval", "") != "never":
                    module.next_update = now + util.format.seconds(
                        module.parameter("interval", self.__config.interval())
                    )
                else:
                    # "never": effectively disable further updates
                    module.next_update = sys.maxsize
            for widget in module.widgets():
                if not widget.id in self.__content:
                    self.__content[widget.id] = { "minimized": False }
                self.__content[widget.id]["text"] = widget.full_text()

    def statusline(self):
        """Assemble one full status line from all modules."""
        blocks = []
        self.__widgetcount = 0
        for module in self.__modules:
            blocks.extend(self.blocks(module))
        return {"blocks": blocks, "suffix": ","}

    def wait(self, interval):
        time.sleep(interval)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | a1a0da5078676f3fa10b3d61ee581c40 | 32.588235 | 119 | 0.532205 | 4.15945 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/docker_ps.py | 1 | 1263 | # -*- coding: utf-8 -*-
"""Displays the number of docker containers running
Requires the following python packages:
* docker
contributed by `jlopezzarza <https://github.com/jlopezzarza>`_ - many thanks!
"""
import docker
from requests.exceptions import ConnectionError
import core.module
import core.widget
import core.decorators
class Module(core.module.Module):
    """Shows the number of running docker containers, or an error marker."""

    @core.decorators.every(seconds=5)
    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.docker_info))
        self.__info = ""

    def state(self, widget):
        """Warn on zero running containers; critical when docker is down."""
        if self.__info == "OK - 0":
            return ["warning"]
        if self.__info in ("n/a", "off"):
            return ["critical"]
        return []

    def docker_info(self, widget):
        """Ping the docker daemon and count running containers."""
        try:
            client = docker.DockerClient(base_url="unix://var/run/docker.sock")
            client.ping()
            running = client.containers.list(filters={"status": "running"})
            self.__info = "OK - {}".format(len(running))
        except ConnectionError:
            # daemon unreachable
            self.__info = "off"
        except Exception:
            # any other failure (API error, permissions, ...)
            self.__info = "n/a"
        return self.__info
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | e1bf75bef70b22862125718d9c9b0cd3 | 25.3125 | 77 | 0.590657 | 3.934579 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/core/git.py | 1 | 2138 | # pylint: disable=C0111,R0903
"""Print the branch and git status for the
currently focused window.
Requires:
* xcwd
* Python module 'pygit2'
"""
import os
import pygit2
import core.module
import util.cli
class Module(core.module.Module):
    """Shows the git branch and dirty state for the focused window's
    working directory (looked up via xcwd)."""

    def __init__(self, config, theme):
        super().__init__(config, theme, [])
        self.__error = False

    def hidden(self):
        # hide the module whenever the current directory is not a git repo
        return self.__error

    def update(self):
        """Rebuild the widget list from the focused window's repository."""
        state = {}
        self.clear_widgets()
        try:
            directory = util.cli.execute("xcwd").strip()
            directory = self.__get_git_root(directory)
            repo = pygit2.Repository(directory)
            self.add_widget(name="git.main", full_text=repo.head.shorthand)
            for filepath, flags in repo.status().items():
                # bug fix: status flags are a bitmask and a file can carry
                # several flags at once; test membership with & instead of ==
                # (equality missed e.g. files both staged and modified)
                if flags & (pygit2.GIT_STATUS_WT_NEW | pygit2.GIT_STATUS_INDEX_NEW):
                    state["new"] = True
                if flags & (
                    pygit2.GIT_STATUS_WT_DELETED | pygit2.GIT_STATUS_INDEX_DELETED
                ):
                    state["deleted"] = True
                if flags & (
                    pygit2.GIT_STATUS_WT_MODIFIED | pygit2.GIT_STATUS_INDEX_MODIFIED
                ):
                    state["modified"] = True
            self.__error = False
            if "new" in state:
                self.add_widget(name="git.new")
            if "modified" in state:
                self.add_widget(name="git.modified")
            if "deleted" in state:
                self.add_widget(name="git.deleted")
        except Exception:
            self.__error = True

    def state(self, widget):
        # theme state is the widget name suffix (main/new/modified/deleted)
        return widget.name.split(".")[1]

    def __get_git_root(self, directory):
        """Walk up from directory until a .git entry is found; else '/'."""
        while len(directory) > 1:
            if os.path.exists(os.path.join(directory, ".git")):
                return directory
            directory = "/".join(directory.split("/")[0:-1])
        return "/"
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 32a9510f0282e9b7a51856d69de6a4f0 | 27.131579 | 75 | 0.511693 | 4.159533 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/util/location.py | 1 | 3512 | """Retrieves location information from an external
service and caches it for 12h (retries are done every
30m in case of problems)
Right now, it uses (in order of preference):
- http://free.ipwhois.io/ - 10k free requests/month
- http://ipapi.co/ - 30k free requests/month
- http://ip-api.com/ - ~2m free requests/month
"""
import json
import time
import urllib.request
__document = None  # unused legacy field, kept for backwards compatibility
__data = {}  # cached location attributes (canonical key -> value)
__next = 0  # unix timestamp after which a refresh is allowed

# Each source maps provider-specific JSON keys to our canonical keys
# (latitude, longitude, country_name, country_code, city_name, public_ip).
__sources = [
    {
        "url": "http://free.ipwhois.io/json/",
        "mapping": {
            "latitude": "latitude",
            "longitude": "longitude",
            "country": "country_name",
            "country_code": "country_code",
            "city": "city_name",
            "ip": "public_ip",
        },
    },
    {
        "url": "http://ip-api.com/json",
        "mapping": {
            # bug fix: ip-api.com returns 'lat'/'lon'; the mapping direction
            # is provider key -> canonical key, so these two entries were
            # reversed and latitude/longitude were never filled in
            "lat": "latitude",
            "lon": "longitude",
            "country": "country_name",
            "countryCode": "country_code",
            "city": "city_name",
            "query": "public_ip",
        },
    },
    {
        "url": "http://ipapi.co/json",
        "mapping": {
            "latitude": "latitude",
            "longitude": "longitude",
            "country_name": "country_name",
            "country_code": "country_code",
            "city": "city_name",
            "ip": "public_ip",
        },
    }
]


def __expired():
    """Return True when the cached data is due for a refresh."""
    global __next
    return __next <= time.time()


def __load():
    """Try each source in order; the first one that answers wins.

    On success the next refresh is scheduled in 12h, on failure a retry is
    scheduled in 30m.
    """
    global __data
    global __next
    __data = {}
    for src in __sources:
        try:
            tmp = json.loads(urllib.request.urlopen(src["url"]).read())
            for k, v in src["mapping"].items():
                __data[v] = tmp.get(k, None)
            __next = time.time() + 60 * 60 * 12  # update once every 12h
            return
        except Exception:
            pass  # best effort: fall through to the next source
    __next = time.time() + 60 * 30  # error - try again every 30m


def __get(name):
    """Return a cached attribute, refreshing the cache when it has expired."""
    global __data
    # bug fix: reload only on expiry; the old `not __data or __expired()`
    # condition re-queried on every call while the cache was empty, which
    # bypassed the 30m retry backoff after a failed load
    if __expired():
        __load()
    if __data and name in __data:
        return __data[name]
    return None
def reset():
    """Drop the cached location data and force a reload on the next query."""
    global __data, __next
    __data = None
    __next = 0
def coordinates():
    """Return the cached (latitude, longitude) pair.

    :return: current latitude and longitude
    :rtype: pair of strings
    """
    lat = __get("latitude")
    lon = __get("longitude")
    return lat, lon


def country():
    """Return the cached country name.

    :rtype: string
    """
    return __get("country_name")


def country_code():
    """Return the cached country code.

    :rtype: string
    """
    return __get("country_code")


def city_name():
    """Return the cached city name.

    :rtype: string
    """
    return __get("city_name")


def public_ip():
    """Return the cached public IP address.

    :rtype: string
    """
    return __get("public_ip")
def location_info():
    """Return all cached location attributes in a single dictionary.

    :return: public IP, country name, country code, city name & coordinates
    :rtype: dictionary
    """
    info = {}
    info["public_ip"] = __get("public_ip")
    info["country"] = __get("country_name")
    info["country_code"] = __get("country_code")
    info["city_name"] = __get("city_name")
    info["latitude"] = __get("latitude")
    info["longitude"] = __get("longitude")
    return info
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | 5232c865b8fcf10824753e6025899968 | 21.08805 | 80 | 0.531606 | 3.855104 | false | false | false | false |
tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/messagereceiver.py | 1 | 2803 | # pylint: disable=C0111,R0903
"""
Displays the message that's received via unix socket.
Parameters:
* messagereceiver : Unix socket address (e.g: /tmp/bumblebee_messagereceiver.sock)
Example:
The following examples assume that /tmp/bumblebee_messagereceiver.sock is used as unix socket address.
In order to send the string "I bumblebee-status" to your status bar, use the following command:
echo -e '{"message":"I bumblebee-status", "state": ""}' | socat unix-connect:/tmp/bumblebee_messagereceiver.sock STDIO
In order to highlight the text, the state variable can be used:
echo -e '{"message":"I bumblebee-status", "state": "warning"}' | socat unix-connect:/tmp/bumblebee_messagereceiver.sock STDIO
contributed by `bbernhard <https://github.com/bbernhard>`_ - many thanks!
"""
import socket
import logging
import os
import json
import core.module
import core.widget
import core.input
class Module(core.module.Module):
    """Displays the most recent JSON message received over a unix socket.

    ``self.background = True`` marks the module to be driven from a
    background thread (behavior defined by ``core.module`` — confirm there);
    :meth:`update` blocks on the socket and triggers redraws as messages
    arrive.
    """

    @core.decorators.never
    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.message))
        self.background = True

        # Path of the unix socket to listen on ("address" parameter).
        self.__unix_socket_address = self.parameter("address", "")
        # Last received message text and its display state (e.g. "warning").
        self.__message = ""
        self.__state = []

    def message(self, widget):
        # Widget callback: the text currently shown in the bar.
        return self.__message

    def __read_data_from_socket(self):
        # Generator: serves one client connection at a time, yielding decoded
        # chunks (up to 1024 bytes each) until the peer disconnects, then
        # rebinds the socket and waits for the next client.
        while True:
            try:
                # Remove a stale socket file left by a previous run; bind()
                # below would otherwise fail with "address already in use".
                os.unlink(self.__unix_socket_address)
            except OSError:
                # Only fatal if the path still exists (unlink really failed);
                # a missing file is the normal first-run case.
                if os.path.exists(self.__unix_socket_address):
                    logging.exception(
                        "Couldn't bind to unix socket %s", self.__unix_socket_address
                    )
                    raise
            with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
                s.bind(self.__unix_socket_address)
                s.listen()
                conn, _ = s.accept()
                with conn:
                    while True:
                        data = conn.recv(1024)
                        if not data:
                            break
                        yield data.decode("utf-8")

    def update(self):
        # Blocks on the socket generator; each received chunk is expected to
        # be one complete JSON object with "message" and "state" keys.
        try:
            for received_data in self.__read_data_from_socket():
                parsed_data = json.loads(received_data)

                self.__message = parsed_data["message"]
                self.__state = parsed_data["state"]
                core.event.trigger("update", [self.id], redraw_only=True)
        except json.JSONDecodeError:
            logging.exception("Couldn't parse message")
        except Exception:
            # NOTE(review): a socket error ends the loop for good; no retry.
            logging.exception("Unexpected exception while reading from socket")

    def state(self, widget):
        # Widget callback: theme state(s) for highlighting the message.
        return self.__state
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | dc02882ed128ea2696f51f4bbbcbd7d1 | 31.905882 | 135 | 0.585627 | 4.131462 | false | false | false | false |
pythonindia/junction | junction/urls.py | 1 | 5345 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.views.defaults
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic.base import RedirectView, TemplateView
from rest_framework import routers
import junction.proposals.dashboard
from junction.conferences import views as conference_views
from junction.devices.views import DeviceDetailApiView, DeviceListApiView
from junction.feedback.views import (
FeedbackListApiView,
FeedbackQuestionListApiView,
view_feedback,
)
from junction.proposals import views as proposal_views
from junction.schedule import views as schedule_views
from junction.schedule.views import non_proposal_schedule_item_view
from .views import HomePageView
# DRF router wiring up the JSON API viewsets; mounted at ``/api/v1/`` in
# ``urlpatterns`` below.
router = routers.DefaultRouter()
router.register("conferences", conference_views.ConferenceView)
router.register("venues", conference_views.VenueView)
router.register("rooms", conference_views.RoomView)
router.register("proposals", proposal_views.ProposalView)
router.register("schedules", schedule_views.ScheduleView)
"""
Root url routering file.
You should put the url config in their respective app putting only a
reference to them here.
"""
# URL configuration. Order matters: the ``(?P<conference_slug>...)`` patterns
# at the bottom match almost any path segment, so they must stay last.
urlpatterns = [
    url(r"^$", HomePageView.as_view(), name="page-home"),
    # Django Admin (mounted at "nimda" — "admin" reversed, presumably to
    # obscure the default admin URL; confirm with project docs)
    url(r"^nimda/", admin.site.urls),
    # Third Party Stuff
    url(r"^accounts/", include("allauth.urls")),
    url("^markdown/", include("django_markdown.urls")),
    # Tickets
    url(r"^tickets/", include("junction.tickets.urls")),
    url(
        r"^feedback/(?P<schedule_item_id>\d+)/$", view_feedback, name="feedback-detail"
    ),
    url(
        r"^schedule_item/(?P<sch_item_id>\d+)/$",
        non_proposal_schedule_item_view,
        name="schedule-item",
    ),
    # JSON API (viewsets registered on ``router`` above)
    url(r"^api/v1/", include(router.urls)),
    # Device
    url(r"^api/v1/devices/$", DeviceListApiView.as_view(), name="device-list"),
    url(
        r"^api/v1/devices/(?P<_uuid>[\w-]+)/$",
        DeviceDetailApiView.as_view(),
        name="device-detail",
    ),
    # Feedback
    url(
        "^api/v1/feedback_questions/$",
        FeedbackQuestionListApiView.as_view(),
        name="feedback-questions-list",
    ),
    url("^api/v1/feedback/$", FeedbackListApiView.as_view(), name="feedback-list"),
    # User Dashboard
    url(r"^profiles/", include("junction.profiles.urls", namespace="profiles")),
    # Static Pages. TODO: to be refactored
    url(
        r"^speakers/$",
        TemplateView.as_view(template_name="static-content/speakers.html",),
        name="speakers-static",
    ),
    url(
        r"^schedule/$",
        TemplateView.as_view(template_name="static-content/schedule.html",),
        name="schedule-static",
    ),
    url(
        r"^venue/$",
        TemplateView.as_view(template_name="static-content/venue.html",),
        name="venue-static",
    ),
    url(
        r"^sponsors/$",
        TemplateView.as_view(template_name="static-content/sponsors.html",),
        name="sponsors-static",
    ),
    url(
        r"^blog/$",
        TemplateView.as_view(template_name="static-content/blog-archive.html",),
        name="blog-archive",
    ),
    url(
        r"^coc/$",
        TemplateView.as_view(template_name="static-content/coc.html",),
        name="coc-static",
    ),
    url(
        r"^faq/$",
        TemplateView.as_view(template_name="static-content/faq.html",),
        name="faq-static",
    ),
    # Conference Pages (slug-prefixed; everything below is per-conference)
    url(r"^(?P<conference_slug>[\w-]+)/", include("junction.conferences.urls")),
    # Proposals related
    url(r"^(?P<conference_slug>[\w-]+)/proposals/", include("junction.proposals.urls")),
    url(
        r"^(?P<conference_slug>[\w-]+)/dashboard/reviewers/",
        junction.proposals.dashboard.reviewer_comments_dashboard,
        name="proposal-reviewers-dashboard",
    ),
    url(
        r"^(?P<conference_slug>[\w-]+)/dashboard/proposal_state/$",
        junction.proposals.dashboard.proposal_state,
        name="proposal-state",
    ),
    url(
        r"^(?P<conference_slug>[\w-]+)/dashboard/$",
        junction.proposals.dashboard.proposals_dashboard,
        name="proposal-dashboard",
    ),
    url(
        r"^(?P<conference_slug>[\w-]+)/dashboard/votes/$",
        junction.proposals.dashboard.reviewer_votes_dashboard,
        name="proposal-reviewer-votes-dashboard",
    ),
    url(
        r"^(?P<conference_slug>[\w-]+)/dashboard/votes/export/$",
        junction.proposals.dashboard.export_reviewer_votes,
        name="export-reviewer-votes",
    ),
    # Schedule related
    url(r"^(?P<conference_slug>[\w-]+)/schedule/", include("junction.schedule.urls")),
    # Proposals as conference home page. TODO: Needs to be enhanced
    url(
        r"^(?P<conference_slug>[\w-]+)/",
        RedirectView.as_view(pattern_name="proposals-list"),
        name="conference-detail",
    ),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

# Expose the default error views directly in DEBUG so their templates can be
# previewed without forcing an actual error.
if settings.DEBUG:
    urlpatterns += [
        url(r"^400/$", django.views.defaults.bad_request),  # noqa
        url(r"^403/$", django.views.defaults.permission_denied),
        url(r"^404/$", django.views.defaults.page_not_found),
        url(r"^500/$", django.views.defaults.server_error),
    ]
| mit | 9488f04d446947bb376d1700d3481293 | 32.40625 | 88 | 0.64565 | 3.560959 | false | false | true | false |
spectralpython/spectral | spectral/algorithms/continuum.py | 1 | 14678 | '''
Continuum and continuum removal.
Continuum is defined as convex hull of spectrum.
Continuum is removed from spectra by dividing spectra by its continuum.
That results in values between 0 and 1, where absorption bands are expressed as
drops below 1. It is usefull for comparing and classification based on
absorption bands and indifferent to scale.
References:
Clark, R.N. and Roush, L. (1984) Reflectance Spectroscopy Quantitative Analysis
Techniques for Remote Sensing Applications. Journal of Geophysical Research,
89, 6329-6340. http://dx.doi.org/10.1029/JB089iB07p06329
Jiwei Bai, et al., "Classification methods of the hyperspectralimage based
on the continuum-removed," Proc. SPIE 4897, Multispectral and Hyperspectral
Remote Sensing Instruments and Applications, (16 June 2003);
doi: 10.1117/12.466729
Lehnert, Lukas & Meyer, Hanna & Obermeier, Wolfgang & Silva, Brenner & Regeling,
Bianca & Thies, Boris & Bendix, Jorg. (2019). Hyperspectral Data Analysis in R:
The hsdar Package. Journal of statistical software. 89. 1-23. 10.18637/jss.v089.i12.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import numpy as np
import spectral as spy
from ..utilities.errors import has_nan, NaNValueError
def _segment_concave_region(spectrum, bands, indices, ind_fill, ibegin, iend):
    """Build the segmented upper hull over a concave region of `spectrum`.

    Called by `_find_indices_in_range` when the whole region
    ``[ibegin, iend)`` lies below the line joining its endpoints. Appends
    chosen point indices into `indices` starting at position `ind_fill` and
    returns the next free fill position.
    """
    # Here we don't search for local maxima w.r.t. line that connects ends of this region.
    # That is behavior of the hsdar. It also makes more sense in the context of
    # hyperspectral image analysis. We are already not convex, and we can't
    # include all points that make result quasi-convex, since there will be too
    # many of them, often right one after another. However, filtering local maxima,
    # below, will make result quasi-convex.

    # Notice that we are using >=, not strict >. That will include maxima that
    # are flat, that stretch few points. It will also include local minima,
    # but just as with local maxima that are too low below slope line, these
    # will be filtered out.
    is_maximum = np.logical_and(spectrum[ibegin+1:iend-1] >= spectrum[ibegin:iend-2],
                                spectrum[ibegin+1:iend-1] >= spectrum[ibegin+2:iend])

    # Get local maxima indices. (Note that np.where returns a tuple with one
    # array per dimension.)
    lmi = np.where(is_maximum)[0]

    # No local maxima, return.
    if len(lmi) == 0:
        return ind_fill

    # Make it relative to input array - spectrum.
    lmi += ibegin + 1

    # Get local maxima.
    local_maxima = spectrum[lmi]

    # Filter those maxima that cause slope between them to change direction.
    # This makes remaining maxima satisfy the quasi-convexity condition.
    slope_dir = spectrum[iend-1] - spectrum[ibegin]
    filtered_indices = []
    if slope_dir >= 0.0:
        # Rising overall: keep maxima that form a strictly increasing chain.
        last_included_value = spectrum[ibegin]
        for i in range(len(local_maxima)):
            lm = local_maxima[i]
            if lm > last_included_value:
                filtered_indices.append(lmi[i])
                last_included_value = lm
    else:
        # Slope is negative. Start from back.
        last_included_value = spectrum[iend-1]
        for i in range(len(local_maxima) - 1, -1, -1):
            lm = local_maxima[i]
            if lm > last_included_value:
                filtered_indices.append(lmi[i])
                last_included_value = lm
        # Restore ascending index order after the backwards scan.
        filtered_indices.reverse()

    # Take only valid local maxima indices.
    lmi = filtered_indices

    # If there is no valid local maxima indices, return.
    if len(lmi) == 0:
        return ind_fill

    # Add indices to result, and process subregions between them with convex hull
    # algorithm, to make sure all input points are below the resulting hull.
    next_ibegin = ibegin
    for i in lmi:
        # There should be at least 1 point between edges, to call _find_indices_in_range.
        # However, these are two local maxima, and if there is one point between them,
        # it must be below both. So only for two points inside region borders
        # call _find_indices_in_range.
        if i > next_ibegin + 2:
            # Put hull around points in subregion.
            ind_fill = _find_indices_in_range(
                spectrum, bands, False, indices, ind_fill, next_ibegin, i + 1)
        indices[ind_fill] = i
        ind_fill += 1
        next_ibegin = i

    # Don't miss the last range.
    ind_fill = _find_indices_in_range(
        spectrum, bands, False, indices, ind_fill, lmi[-1], iend)

    return ind_fill
def _find_indices_in_range(spectrum, bands, segmented, indices, ind_fill, ibegin, iend):
    """Recursively collect upper-hull point indices for ``[ibegin, iend)``.

    Quickhull-style divide and conquer: find the point farthest above the
    chord joining the region's endpoints, record it, and recurse on both
    sides. With `segmented` True, concave regions are handed to
    `_segment_concave_region` instead of being skipped. Appends into
    `indices` starting at `ind_fill`; returns the next free fill position.
    """
    iendi = iend - 1

    # We search for maximum, but not from the x axis.
    # We search for maximum w.r.t to axis represented by line connecting
    # first and last point (of this iteration).
    # First find normal to new axis. Swap x and y, and negate new x.
    # If we negate x instead of y, normal will always point upward.
    naxis_y = bands[iendi] - bands[ibegin]
    naxis_x = spectrum[ibegin] - spectrum[iendi]

    # Don't literally compute distance from the axis. Rather, calculate dot products
    # of points with the normal, and find the largest. The largest dot product (it does not have to be positive)
    # is the one that goes more in the direction of normal than others. To get the distance,
    # we could divide each dot product by norm/length of the normal. But that is constant,
    # and does not effect which one is maximum.
    # Note that here we include first point of the range, but not last.
    imax = np.argmax(bands[ibegin:iendi] * naxis_x +
                     spectrum[ibegin:iendi] * naxis_y) + ibegin

    # If first point is maximum, then all others are "below" the axis,
    # which means this is concave region.
    if imax == ibegin:
        # If we are in segmented upper hull mode, then segment concave region.
        # For that to make sense, we need at least 3 elements between edges.
        if segmented and iend - ibegin > 5:
            ind_fill = _segment_concave_region(
                spectrum, bands, indices, ind_fill, ibegin, iend)
        return ind_fill

    # Repeat same procedure on the left side, if there are enough points left.
    # At least 1 is required between first and last point in range.
    if imax > ibegin + 1:
        ind_fill = _find_indices_in_range(
            spectrum, bands, segmented, indices, ind_fill, ibegin, imax + 1)

    # Push middle index. Left-side recursion above has already emitted all
    # hull indices smaller than imax, so output order stays ascending.
    indices[ind_fill] = imax
    ind_fill += 1

    # Repeat for the right side.
    if imax < iend - 2:
        ind_fill = _find_indices_in_range(
            spectrum, bands, segmented, indices, ind_fill, imax, iend)

    return ind_fill
def _find_continuum_points_recursive(spectrum, bands, segmented, indices):
    """Find the continuum points of one spectrum.

    `indices` is a reusable scratch buffer (length >= number of bands) that
    receives the selected point positions. Returns a 2-tuple of
    ``(bands_of_points, reflectance_of_points)``.
    """
    num_points = len(spectrum)
    # The first and last band always belong to the continuum.
    indices[0] = 0
    filled = _find_indices_in_range(
        spectrum, bands, segmented, indices, 1, 0, num_points)
    indices[filled] = num_points - 1
    selected = indices[:filled + 1]
    return (bands[selected], spectrum[selected])
def _process_continuum(spectra, bands, remove_continuum, segmented, out):
    """Shared implementation of `spectral_continuum` and `remove_continuum`.

    Computes the (optionally segmented) upper hull for each spectrum in
    `spectra` (1d, 2d, or 3d; last axis is the band axis) and either writes
    the continuum into `out` or divides `out` by it in place
    (`remove_continuum` True). Returns `out` (allocated here when None).
    """
    if not isinstance(spectra, np.ndarray):
        raise TypeError('Expected spectra to be a numpy.ndarray.')

    if not isinstance(bands, np.ndarray):
        raise TypeError('Expected bands to be a numpy.ndarray.')

    if out is not None and not isinstance(out, np.ndarray):
        raise TypeError('Expected out to be a numpy.ndarray or None.')

    if len(spectra.shape) not in (1, 2, 3):
        raise ValueError('Expected spectra to be 1d, 2d, or 3d array.')

    if len(bands.shape) != 1:
        raise ValueError('Expected bands to be 1d array.')

    if out is not None and not np.array_equal(out.shape, spectra.shape):
        raise ValueError('Expected out to be same shape as spectra.')

    out = np.empty_like(spectra) if out is None else out

    # In case we remove continuum, always divide out by continuum,
    # to avoid creating additional temporary array. (If spectra *is* out,
    # the division below already operates on the caller's data in place.)
    if spectra is not out and remove_continuum:
        out[:] = spectra[:]

    original_shape = spectra.shape
    nbands = original_shape[-1]

    # Hoist the attribute lookup out of the loops below; the indices buffer
    # is allocated once and reused for every spectrum.
    interp = np.interp
    indices = np.empty(nbands, np.int64)

    if len(spectra.shape) == 1:
        points = _find_continuum_points_recursive(
            spectra, bands, segmented, indices)
        continuum = interp(bands, points[0], points[1])
        if remove_continuum:
            out /= continuum
        else:
            out[:] = continuum
    elif len(spectra.shape) == 2:
        for i in range(spectra.shape[0]):
            points = _find_continuum_points_recursive(
                spectra[i], bands, segmented, indices)
            continuum = interp(bands, points[0], points[1])
            if remove_continuum:
                out[i, :] /= continuum
            else:
                out[i, :] = continuum
    else:
        for i in range(spectra.shape[0]):
            for j in range(spectra.shape[1]):
                points = _find_continuum_points_recursive(
                    spectra[i, j], bands, segmented, indices)
                continuum = interp(bands, points[0], points[1])
                if remove_continuum:
                    out[i, j, :] /= continuum
                else:
                    out[i, j, :] = continuum

    return out
def continuum_points(spectrum, bands, mode='convex'):
    '''Returns points of spectra that belong to its continuum.

    Arguments:

        `spectrum` (:class:`numpy.ndarray`)

            1d :class:`numpy.ndarray` holding spectral signature.

        `bands` (:class:`numpy.ndarray`):

            1d :class:`numpy.ndarray`, holding band values of spectra.
            Length of `bands` should be the same as `spectrum`.
            Note that bands should be sorted in ascending order (which is often
            not the case with AVIRIS), otherwise unexpected results could occur.

        `mode` (string, default 'convex'):

            Default mode is 'convex' which returns convex upper hull of the
            spectrum. Another supported mode is 'segmented' which builds
            segmented upper hull. This is useful to identify more detailed
            contour of the spectrum, but without strong absorption bands.

    Returns:

        2-tuple, with each element being :class:`numpy.ndarray`.
        First element contains the band values of points that belong to the
        continuum. Second element contains the corresponding reflectance
        values. By applying linear interpolation to this data as x and y,
        we get continuum of spectrum. However this function is particularly
        useful for applying other interpolations or any other processing
        on these points.
    '''
    if not isinstance(spectrum, np.ndarray):
        raise TypeError('Expected spectra to be a numpy.ndarray.')

    if not isinstance(bands, np.ndarray):
        raise TypeError('Expected bands to be a numpy.ndarray.')

    if len(spectrum.shape) != 1:
        raise ValueError('Expected spectra to be 1d array.')

    if len(bands.shape) != 1:
        raise ValueError('Expected bands to be 1d array.')

    # Scratch buffer for selected point positions; sized like the spectrum.
    indices = np.empty_like(spectrum, dtype='int64')
    return _find_continuum_points_recursive(spectrum, bands, mode == 'segmented', indices)
def spectral_continuum(spectra, bands, mode='convex', out=None):
    '''Returns continua of spectra.

    Continuum is defined as the convex hull of a spectrum.

    Arguments:

        `spectra` (:class:`numpy.ndarray`)

            Can be 1d, 2d or 3d :class:`numpy.ndarray`, where last dimension
            holds individual spectra.

        `bands` (:class:`numpy.ndarray`):

            1d :class:`numpy.ndarray` holding band values of spectra. Its
            length should equal the last dimension of `spectra`. Bands must
            be sorted in ascending order (often not the case with AVIRIS),
            otherwise unexpected results could occur.

        `mode` (string, default 'convex'):

            'convex' returns the convex upper hull of each spectrum;
            'segmented' builds a segmented upper hull, which captures a more
            detailed contour without strong absorption bands.

        `out` (:class:`numpy.ndarray`, default None):

            If provided, it must have the same type and shape as `spectra`;
            it will hold the result and be returned by this function.

    Returns:

        A :class:`numpy.ndarray` of continua, one per spectrum, with the
        same type and shape as `spectra`. If `out` is provided, `out` is
        returned.
    '''
    use_segmented = (mode == 'segmented')
    # False: compute the continuum itself, do not divide it out.
    return _process_continuum(spectra, bands, False, use_segmented, out)
def remove_continuum(spectra, bands, mode='convex', out=None):
    '''Returns spectra with continuum removed.

    Continuum is defined as the convex hull of spectra. Continuum is removed
    from spectra by dividing spectra by its continuum.

    Arguments:

        `spectra` (:class:`numpy.ndarray`)

            Can be 1d, 2d or 3d :class:`numpy.ndarray`, where last dimension
            holds individual spectra.

        `bands` (:class:`numpy.ndarray`):

            1d :class:`numpy.ndarray`, holding band values of spectra.
            Length of `bands` should be the same as last dimension of `spectra`.
            Note that bands should be sorted in ascending order (which is often
            not the case with AVIRIS), otherwise unexpected results could occur.

        `mode` (string, default 'convex'):

            Default mode is 'convex' which removes convex upper hull of the
            spectrum. Another supported mode is 'segmented' which removes
            segmented upper hull. This is useful to identify two or more small
            features instead of one large feature.

        `out` (:class:`numpy.ndarray`, default None):

            If provided, it must have type `np.float64` and same shape as
            `spectra`, and it will hold the result, and will be returned as
            result of this function.

    Returns:

        A :class:`numpy.ndarray` of continuum-removed values, one per spectrum
        in spectra. It has type `np.float64` and the same shape as spectra.
        If `out` is provided, `out` will be returned.
    '''
    # Integer output would silently truncate ratios in [0, 1] to 0 or 1.
    if out is not None and out.dtype != np.float64:
        raise ValueError('Expected out to have dtype float64. '
                         'Results of continuum removal are floating point numbers.')
    return _process_continuum(spectra, bands, True, mode == 'segmented', out)
| mit | 6842151931a3d6fe07b427b1ec545ca2 | 40 | 112 | 0.649135 | 3.904762 | false | false | false | false |
pythonindia/junction | junction/base/constants.py | 1 | 2834 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import inspect
def _user_attributes(cls):
defaults = dir(type(str("defaults"), (object,), {})) # gives all inbuilt attrs
return [item[0] for item in inspect.getmembers(cls) if item[0] not in defaults]
def choices(cls):
    """Class decorator that builds Django-style ``CHOICES`` from attributes.

    Each ``_NAME = [value, label]`` attribute gains an un-prefixed alias
    (``cls.NAME = value``), and ``cls.CHOICES`` is set to the tuple of
    ``(value, label)`` pairs.
    """
    pairs = []
    for attr in _user_attributes(cls):
        stored = getattr(cls, attr)
        # Drop the leading underscore: ``_DRAFT`` also becomes ``DRAFT``.
        setattr(cls, attr[1:], stored[0])
        pairs.append((stored[0], stored[1]))
    cls.CHOICES = tuple(pairs)
    return cls
@choices
class ConferenceStatus:
    """Conference lifecycle states; ``@choices`` builds ``CHOICES`` from these."""
    _ACCEPTING_CFP = [1, "Accepting Proposals"]
    _CLOSED_CFP = [2, "Proposal submission closed"]
    _ACCEPTING_VOTES = [3, "Accepting Votes"]
    _SCHEDULE_PUBLISHED = [4, "Schedule Published"]
@choices
class ProposalStatus:
    """Visibility states of a proposal."""
    _DRAFT = [1, "Draft"]
    _PUBLIC = [2, "Public"]
    _CANCELLED = [3, "Cancelled"]
@choices
class ProposalReviewStatus:
    """Review outcome assigned to a proposal by reviewers."""
    _YET_TO_BE_REVIEWED = [1, "Yet to be reviewed"]
    _SELECTED = [2, "Selected"]
    _REJECTED = [3, "Rejected"]
    _ON_HOLD = [4, "On hold"]
    _WAIT_LISTED = [5, "Wait-listed"]
@choices
class ProposalTargetAudience:
    """Experience level a talk proposal is aimed at."""
    _BEGINNER = [1, "Beginner"]
    _INTERMEDIATE = [2, "Intermediate"]
    _ADVANCED = [3, "Advanced"]
@choices
class ProposalUserVoteRole:
    """Role in which a user cast a vote on a proposal."""
    _PUBLIC = [1, "Public"]
    _REVIEWER = [2, "Reviewer"]
@choices
class ProposalReviewVote:
    """Reviewer vote scale; numeric values double as vote weights."""
    _MUST_HAVE = [2, "Must have"]
    _GOOD = [1, "Good"]
    _NOT_BAD = [0, "Not Bad"]
    _NOT_ALLOWED = [-1, "Shouldn't be allowed"]
# FIXME: `ProposalReviewerComment` should be Boolean
@choices
class ProposalReviewerComment:
    """Whether a reviewer commented; stored as strings (see FIXME above)."""
    _COMMENTED = ["True", "Yes"]
    _NOT_COMMENTED = ["False", "No"]
@choices
class ProposalVotesFilter:
    """Filter/sort options for the reviewer votes dashboard."""
    _NO_VOTES = [0, "No votes"]
    _MIN_ONE_VOTE = [1, "Minimum 1 vote"]
    _SORT_BY_SUM = [2, "Sort by total votes"]
    _SORT_BY_REVIEWER = [3, "Sort by your votes"]
    _SORT_BY_SELECTION = [4, "Sort by selection"]
class ConferenceSettingConstants:
    """Per-conference feature flags as name/value/description dicts."""
    ALLOW_PUBLIC_VOTING_ON_PROPOSALS = {
        "name": "allow_public_voting_on_proposals",
        "value": True,
        "description": "Allow public to vote on proposals",
    }
    DISPLAY_PROPOSALS_IN_PUBLIC = {
        "name": "display_proposals_in_public",
        "value": True,
        "description": "Display proposals in public",
    }
    ALLOW_PLUS_ZERO_REVIEWER_VOTE = {
        "name": "allow_plus_zero_reviewer_vote",
        "value": True,
        "description": "Allow +0 vote in reviewer votes",
    }
@choices
class PSRVotePhase:
    """Phases of proposal second-round voting."""
    _PRIMARY = [0, "Initial voting"]
    _SECONDARY = [1, "Second phase voting"]
@choices
class ProposalCommentType:
    """Categories of comments attached to a proposal."""
    _GENERAL = [0, "All general comments"]
    _SECONDARY_VOTING = [1, "Second phase voting"]
| mit | d7734abab5e35aa2f33d586cd42acf8b | 23.643478 | 83 | 0.618913 | 3.070423 | false | false | false | false |
pythonindia/junction | junction/feedback/admin.py | 1 | 3402 | # -*- coding: utf-8 -*-
from django.contrib import admin
from junction.base.admin import TimeAuditAdmin
from junction.conferences import service
from .models import (
ChoiceFeedbackQuestion,
ChoiceFeedbackQuestionValue,
ScheduleItemChoiceFeedback,
ScheduleItemTextFeedback,
TextFeedbackQuestion,
)
# Register your models here.
class TextFeedbackQuestionAdmin(TimeAuditAdmin):
    """Admin for text feedback questions.

    Superusers see everything; conference moderators only see questions
    belonging to their own conferences.
    """

    list_display = (
        "title",
        "schedule_item_type",
        "conference",
    ) + TimeAuditAdmin.list_display  # noqa

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        conferences = [
            m.conference
            for m in service.list_conference_moderator(user=request.user)
        ]
        return qs.filter(conference__in=conferences)
class ChoiceFeedbackQuestionAdmin(TimeAuditAdmin):
    """Admin for multiple-choice feedback questions.

    Superusers see everything; moderators only see their own conferences.
    """

    list_display = (
        "title",
        "schedule_item_type",
        "conference",
    ) + TimeAuditAdmin.list_display  # noqa

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        conferences = [
            m.conference
            for m in service.list_conference_moderator(user=request.user)
        ]
        return qs.filter(conference__in=conferences)
class ChoiceFeedbackQuestionValueAdmin(TimeAuditAdmin):
    """Admin for the selectable values of choice questions.

    Scoped via the parent question's conference for non-superusers.
    """

    list_display = ("question", "title", "value") + TimeAuditAdmin.list_display  # noqa

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        conferences = [
            m.conference
            for m in service.list_conference_moderator(user=request.user)
        ]
        return qs.filter(question__conference__in=conferences)
class ScheduleItemTextFeedbackAdmin(TimeAuditAdmin):
    """Admin listing of submitted free-text feedback.

    Scoped via the question's conference for non-superusers.
    """

    list_display = (
        "schedule_item",
        "question",
        "text",
        "device",
    ) + TimeAuditAdmin.list_display  # noqa
    list_filter = ["schedule_item"]  # noqa

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        conferences = [
            m.conference
            for m in service.list_conference_moderator(user=request.user)
        ]
        return qs.filter(question__conference__in=conferences)
class ScheduleItemChoiceFeedbackAdmin(TimeAuditAdmin):
    """Admin listing of submitted multiple-choice feedback.

    Scoped via the question's conference for non-superusers.
    """

    list_display = (
        "schedule_item",
        "question",
        "value",
        "device",
    ) + TimeAuditAdmin.list_display  # noqa
    list_filter = ["schedule_item"]  # noqa

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        if request.user.is_superuser:
            return qs
        conferences = [
            m.conference
            for m in service.list_conference_moderator(user=request.user)
        ]
        return qs.filter(question__conference__in=conferences)
# Register the feedback models with their customised ModelAdmin classes above.
admin.site.register(TextFeedbackQuestion, TextFeedbackQuestionAdmin)
admin.site.register(ChoiceFeedbackQuestion, ChoiceFeedbackQuestionAdmin)
admin.site.register(ChoiceFeedbackQuestionValue, ChoiceFeedbackQuestionValueAdmin)
admin.site.register(ScheduleItemTextFeedback, ScheduleItemTextFeedbackAdmin)
admin.site.register(ScheduleItemChoiceFeedback, ScheduleItemChoiceFeedbackAdmin)
| mit | e60c1a1fcb39a1772eedd7b877b5dddb | 33.714286 | 87 | 0.702528 | 4.059666 | false | false | false | false |
pythonindia/junction | junction/feedback/serializers.py | 1 | 1459 | # -*- coding: utf-8 -*-
from rest_framework import serializers
from .models import (
ChoiceFeedbackQuestion,
ChoiceFeedbackQuestionValue,
TextFeedbackQuestion,
)
def object_exists(model, pk):
    """Return True if *model* has a row with primary key *pk*.

    Raises serializers.ValidationError when no such row exists.
    """
    # .exists() issues a cheap ``SELECT ... LIMIT 1`` instead of evaluating
    # the queryset's truthiness, which would fetch and cache matching rows.
    # (Matches the .exists() usage in ChoiceFeedbackSerializer.validate.)
    if not model.objects.filter(pk=pk).exists():
        raise serializers.ValidationError("The question doesn't exist")
    return True
class FeedbackQueryParamsSerializer(serializers.Serializer):
    """Validates the ``conference_id`` query parameter of feedback endpoints."""
    conference_id = serializers.IntegerField()
class TextFeedbackSerializer(serializers.Serializer):
    """One answer to a text feedback question: question ``id`` plus free text."""
    id = serializers.IntegerField()
    text = serializers.CharField()

    def validate(self, data):
        # object_exists raises ValidationError for a missing question, so
        # reaching the return means the id is valid.
        if object_exists(TextFeedbackQuestion, pk=data["id"]):
            return data
class ChoiceFeedbackSerializer(serializers.Serializer):
    """One answer to a choice question: question ``id`` plus chosen ``value_id``."""
    id = serializers.IntegerField()
    value_id = serializers.IntegerField()

    def validate(self, data):
        # First ensure the question exists (raises otherwise), then that the
        # chosen value actually belongs to that question.
        if object_exists(ChoiceFeedbackQuestion, pk=data["id"]):
            if ChoiceFeedbackQuestionValue.objects.filter(
                question_id=data["id"], pk=data["value_id"]
            ).exists():
                return data
        raise serializers.ValidationError(
            "The multiple choice value isn't associated with question"
        )
class FeedbackSerializer(serializers.Serializer):
    """Full feedback payload for one schedule item: text and choice answers."""
    schedule_item_id = serializers.IntegerField()
    text = TextFeedbackSerializer(many=True, required=False)
    choices = ChoiceFeedbackSerializer(many=True, required=False)
| mit | e71e5ec60ef16b3aee15d17a6b425f65 | 28.77551 | 74 | 0.694311 | 4.646497 | false | false | false | false |
saxix/django-concurrency | src/concurrency/fields.py | 1 | 13785 | import copy
import functools
import hashlib
import logging
import time
from collections import OrderedDict
from functools import update_wrapper
from django.db import models
from django.db.models import signals
from django.db.models.fields import Field
from django.db.models.signals import class_prepared, post_migrate
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
from concurrency import forms
from concurrency.api import get_revision_of_object
from concurrency.config import conf
from concurrency.core import ConcurrencyOptions
from concurrency.utils import fqn, refetch
from .triggers import _TRIGGERS
logger = logging.getLogger(__name__)

# Seconds between the Unix epoch and 2000-01-01 00:00 (local time).
# Subtracted from the microsecond timestamps produced by
# IntegerVersionField._get_next_version to keep version numbers smaller.
OFFSET = int(time.mktime((2000, 1, 1, 0, 0, 0, 0, 0, 0)))
def class_prepared_concurrency_handler(sender, **kwargs):
    """``class_prepared`` receiver: finalise per-model concurrency metadata.

    For subclasses of a concurrency-enabled base, deep-copies the base's
    metadata so each model has its own, applies any inner ``ConcurrencyMeta``
    overrides, and wraps the model's save machinery unless it was wired up
    manually.
    """
    if hasattr(sender, '_concurrencymeta'):
        if sender != sender._concurrencymeta.base:
            # Subclass of the model that declared the version field: give it
            # an independent metadata copy instead of sharing the base's.
            origin = getattr(sender._concurrencymeta.base, '_concurrencymeta')
            local = copy.deepcopy(origin)
            setattr(sender, '_concurrencymeta', local)

        if hasattr(sender, 'ConcurrencyMeta'):
            sender._concurrencymeta.enabled = getattr(sender.ConcurrencyMeta, 'enabled', True)
            check_fields = getattr(sender.ConcurrencyMeta, 'check_fields', None)
            ignore_fields = getattr(sender.ConcurrencyMeta, 'ignore_fields', None)
            # The two options are mutually exclusive.
            if check_fields and ignore_fields:
                raise ValueError("Cannot set both 'check_fields' and 'ignore_fields'")
            sender._concurrencymeta.check_fields = check_fields
            sender._concurrencymeta.ignore_fields = ignore_fields
            sender._concurrencymeta.increment = getattr(sender.ConcurrencyMeta, 'increment', True)
            sender._concurrencymeta.skip = False

        if not (sender._concurrencymeta.manually):
            # Patch the model's _do_update so saves become version-checked
            # (see VersionField.wrap_model / _wrap_do_update).
            sender._concurrencymeta.field.wrap_model(sender)

        setattr(sender, 'get_concurrency_version', get_revision_of_object)
def post_syncdb_concurrency_handler(sender, **kwargs):
    """``post_migrate`` receiver: (re)create concurrency triggers on every
    configured database alias."""
    # Resolved lazily, when the signal actually fires.
    from django.db import connections

    from concurrency.triggers import create_triggers

    create_triggers(list(connections))
# Finalise concurrency metadata whenever Django prepares a model class.
class_prepared.connect(class_prepared_concurrency_handler,
                       dispatch_uid='class_prepared_concurrency_handler')

# Optionally recreate database triggers after each migrate.
if conf.AUTO_CREATE_TRIGGERS:
    post_migrate.connect(post_syncdb_concurrency_handler,
                         dispatch_uid='post_syncdb_concurrency_handler')
class VersionField(Field):
    """Base class for all concurrency version fields.

    Stores the record's revision number (as a big integer) and, via
    :meth:`wrap_model`, patches the model's ``_do_update`` so that UPDATEs
    are filtered on the old version — a concurrent modification makes the
    filtered update match zero rows and triggers the configured conflict
    callback.
    """

    def __init__(self, *args, **kwargs):
        verbose_name = kwargs.get('verbose_name', None)
        name = kwargs.get('name', None)
        db_tablespace = kwargs.get('db_tablespace', None)
        db_column = kwargs.get('db_column', None)
        help_text = kwargs.get('help_text', _('record revision number'))

        super().__init__(verbose_name, name,
                         help_text=help_text,
                         default=0,
                         db_tablespace=db_tablespace,
                         db_column=db_column)

    def get_internal_type(self):
        return "BigIntegerField"

    def to_python(self, value):
        return int(value)

    def validate(self, value, model_instance):
        # The version is machine-managed; user validation never applies.
        pass

    def formfield(self, **kwargs):
        # Force the concurrency form field/widget regardless of caller kwargs.
        kwargs['form_class'] = self.form_class
        kwargs['widget'] = forms.VersionField.widget
        return super().formfield(**kwargs)

    def contribute_to_class(self, cls, *args, **kwargs):
        super().contribute_to_class(cls, *args, **kwargs)
        # Only initialise metadata once, on the first concrete model that
        # declares the field.
        if hasattr(cls, '_concurrencymeta') or cls._meta.abstract:
            return
        setattr(cls, '_concurrencymeta', ConcurrencyOptions())
        cls._concurrencymeta.field = self
        cls._concurrencymeta.base = cls
        cls._concurrencymeta.triggers = []

    def _set_version_value(self, model_instance, value):
        setattr(model_instance, self.attname, int(value))

    def pre_save(self, model_instance, add):
        # On INSERT, seed the instance with its first version number;
        # UPDATEs are handled inside the wrapped _do_update below.
        if add:
            value = self._get_next_version(model_instance)
            self._set_version_value(model_instance, value)
        return getattr(model_instance, self.attname)

    @classmethod
    def wrap_model(cls, model, force=False):
        if not force and model._concurrencymeta.versioned_save:
            return
        cls._wrap_model_methods(model)
        model._concurrencymeta.versioned_save = True

    @staticmethod
    def _wrap_model_methods(model):
        # Monkey-patch Django's Model._do_update with the version-checking
        # wrapper produced by _wrap_do_update.
        old_do_update = getattr(model, '_do_update')
        setattr(model, '_do_update', model._concurrencymeta.field._wrap_do_update(old_do_update))

    def _wrap_do_update(self, func):
        """Return a replacement for ``Model._do_update`` that filters the
        UPDATE on the old version and bumps the version on success."""

        def _do_update(model_instance, base_qs, using, pk_val, values, update_fields, forced_update):
            version_field = model_instance._concurrencymeta.field
            old_version = get_revision_of_object(model_instance)
            # Multi-table inheritance: only intercept the table that actually
            # owns the version column; delegate parent-table updates.
            if not version_field.model._meta.abstract:
                if version_field.model is not base_qs.model:
                    return func(model_instance, base_qs, using, pk_val, values, update_fields, forced_update)
            # Locate the version column among the pending values and, unless
            # incrementing is disabled, substitute the next version number.
            for i, (field, _1, value) in enumerate(values):
                if field == version_field:
                    if (model_instance._concurrencymeta.increment and not
                            getattr(model_instance, '_concurrency_disable_increment', False)):
                        new_version = field._get_next_version(model_instance)
                        values[i] = (field, _1, new_version)
                        field._set_version_value(model_instance, new_version)
                    # else:
                    #     new_version = old_version
                    break

            # This provides a default if either (1) no values were provided or (2) we reached this code as part of a
            # create. We don't need to worry about a race condition because a competing create should produce an
            # error anyway.
            updated = base_qs.filter(pk=pk_val).exists()
            # This second situation can occur because `Model.save_base` calls `Model._save_parent` without relaying
            # the `force_insert` flag that marks the process as a create. Eventually, `Model._save_table` will call
            # this function as-if it were in the middle of an update. The update is expected to fail because there
            # is no object to update and the caller will fall back on the create logic instead. We need to ensure
            # the update fails (but does not raise an exception) under this circumstance by skipping the concurrency
            # logic.
            if values and updated:
                if (model_instance._concurrencymeta.enabled and
                        conf.ENABLED and
                        not getattr(model_instance, '_concurrency_disabled', False) and
                        (old_version or conf.VERSION_FIELD_REQUIRED)):
                    # Version-checked update: match both pk AND old version.
                    filter_kwargs = {'pk': pk_val, version_field.attname: old_version}
                    updated = base_qs.filter(**filter_kwargs)._update(values) >= 1
                    if not updated:
                        # Conflict: restore the old version on the instance
                        # and let the configured callback decide what to do.
                        version_field._set_version_value(model_instance, old_version)
                        updated = conf._callback(model_instance)
                else:
                    # Concurrency disabled: plain pk-filtered update.
                    filter_kwargs = {'pk': pk_val}
                    updated = base_qs.filter(**filter_kwargs)._update(values) >= 1
            return updated

        return update_wrapper(_do_update, func)
class IntegerVersionField(VersionField):
    """Version field producing a quasi-unique, monotonically growing revision.

    The candidate revision is ``time.time() * 1000000`` shifted by ``OFFSET``
    (microsecond precision when the system clock provides it); the previous
    value plus one is used as a floor so the revision never moves backwards
    even if the clock does.
    """
    form_class = forms.VersionField

    def _get_next_version(self, model_instance):
        current = int(getattr(model_instance, self.attname, 0))
        clock_based = int(time.time() * 1000000) - OFFSET
        return max(current + 1, clock_based)
class AutoIncVersionField(VersionField):
    """Version field that simply bumps the stored revision by one per save."""
    form_class = forms.VersionField

    def _get_next_version(self, model_instance):
        previous = getattr(model_instance, self.attname, 0)
        return int(previous) + 1
class TriggerVersionField(VersionField):
    """
    Version field whose revision number is incremented by a database trigger
    on each UPDATE, rather than by Python code.

    Registered instances are collected in the module-level ``_TRIGGERS`` list
    so the trigger DDL can be created/inspected later (see
    ``concurrency.triggers``).
    """
    form_class = forms.VersionField
    def __init__(self, *args, **kwargs):
        # Optional explicit trigger name; when None, a name is derived via
        # concurrency.triggers.get_trigger_name (see `trigger_name` property).
        self._trigger_name = kwargs.pop('trigger_name', None)
        self._trigger_exists = False
        super().__init__(*args, **kwargs)
    def contribute_to_class(self, cls, *args, **kwargs):
        super().contribute_to_class(cls, *args, **kwargs)
        # Only concrete (or proxy) models get a database trigger; abstract
        # models have no table to attach it to.
        if not cls._meta.abstract or cls._meta.proxy:
            if self not in _TRIGGERS:
                _TRIGGERS.append(self)
    def check(self, **kwargs):
        """Django system check: warn (concurrency.W001) if the expected
        database trigger is missing for this field's write database."""
        errors = []
        model = self.model
        from django.core.checks import Warning
        from django.db import connections, router
        from concurrency.triggers import factory
        alias = router.db_for_write(model)
        connection = connections[alias]
        f = factory(connection)
        if not f.get_trigger(self):
            errors.append(
                Warning(
                    'Missed trigger for field {}'.format(self),
                    hint=None,
                    obj=None,
                    id='concurrency.W001',
                )
            )
        return errors
    @property
    def trigger_name(self):
        # Delegates naming to the triggers module so explicit and derived
        # names are resolved consistently.
        from concurrency.triggers import get_trigger_name
        return get_trigger_name(self)
    def _get_next_version(self, model_instance):
        # always returns the same value
        # (the database trigger, not Python, performs the increment)
        return int(getattr(model_instance, self.attname, 1))
    def pre_save(self, model_instance, add):
        # always returns the same value
        # (placeholder written on INSERT; the trigger manages it afterwards)
        return 1
    @staticmethod
    def _increment_version_number(obj):
        # Mirror in memory the increment the trigger performed in the DB,
        # without issuing an extra SELECT.
        old_value = get_revision_of_object(obj)
        setattr(obj, obj._concurrencymeta.field.attname, int(old_value) + 1)
    @staticmethod
    def _wrap_model_methods(model):
        # Extend the base wrapping with a save() wrapper; the two-argument
        # super() form is needed because this is a staticmethod.
        super(TriggerVersionField, TriggerVersionField)._wrap_model_methods(model)
        old_save = getattr(model, 'save')
        setattr(model, 'save', model._concurrencymeta.field._wrap_save(old_save))
    @staticmethod
    def _wrap_save(func):
        # Wraps Model.save so the in-memory version tracks the trigger's
        # increment; pass refetch=True to reload the row and copy the
        # authoritative revision back onto the instance.
        def inner(self, force_insert=False, force_update=False, using=None, **kwargs):
            reload = kwargs.pop('refetch', False)
            ret = func(self, force_insert, force_update, using, **kwargs)
            TriggerVersionField._increment_version_number(self)
            if reload:
                ret = refetch(self)
                setattr(self,
                        self._concurrencymeta.field.attname,
                        get_revision_of_object(ret))
            return ret
        return update_wrapper(inner, func)
def filter_fields(instance, field):
    """Return True when *field* should participate in the concurrency hash.

    Excluded are: reverse relations (non-concrete fields), generic foreign
    keys (relations with no related model) and many-to-many fields on
    unsaved instances, whose remote rows cannot be loaded before a primary
    key exists.
    """
    return (field.concrete
            and not (field.is_relation and field.related_model is None)
            and not (field.many_to_many and instance.pk is None))
class ConditionalVersionField(AutoIncVersionField):
    """Auto-increment version field that bumps the revision only when the
    watched fields actually changed.

    A SHA-1 hash of the monitored field values is captured on post_init and
    post_save (stored as ``_concurrencymeta.initial``); on save the version
    is incremented only if the current hash differs from that snapshot.
    """
    def contribute_to_class(self, cls, *args, **kwargs):
        super().contribute_to_class(cls, *args, **kwargs)
        # dispatch_uid=fqn(cls) keeps the receivers idempotent if this runs
        # more than once for the same model class.
        signals.post_init.connect(self._load_model,
                                  sender=cls,
                                  dispatch_uid=fqn(cls))
        signals.post_save.connect(self._save_model,
                                  sender=cls,
                                  dispatch_uid=fqn(cls))
    def _load_model(self, *args, **kwargs):
        # post_init receiver: snapshot the field-value hash at load time.
        instance = kwargs['instance']
        instance._concurrencymeta.initial = self._get_hash(instance)
    def _save_model(self, *args, **kwargs):
        # post_save receiver: refresh the snapshot so subsequent saves compare
        # against the just-persisted state.
        instance = kwargs['instance']
        instance._concurrencymeta.initial = self._get_hash(instance)
    def _get_hash(self, instance):
        """Return a SHA-1 hex digest of the monitored field values.

        Field selection: ``check_fields`` wins if set; otherwise all concrete
        fields (minus ``ignore_fields`` when given), filtered through
        ``filter_fields`` and sorted for a stable digest.
        """
        values = OrderedDict()
        opts = instance._meta
        check_fields = instance._concurrencymeta.check_fields
        ignore_fields = instance._concurrencymeta.ignore_fields
        filter_ = functools.partial(filter_fields, instance)
        if check_fields is None and ignore_fields is None:
            fields = sorted([f.name for f in filter(filter_, instance._meta.get_fields())])
        elif check_fields is None:
            fields = sorted([f.name for f in filter(filter_, instance._meta.get_fields())
                             if f.name not in ignore_fields])
        else:
            fields = instance._concurrencymeta.check_fields
        for field_name in fields:
            # do not use getattr here. we do not need extra sql to retrieve
            # FK. the raw value of the FK is enough
            field = opts.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                values[field_name] = getattr(instance, field_name).values_list('pk', flat=True)
            else:
                values[field_name] = field.value_from_object(instance)
        return hashlib.sha1(force_str(values).encode('utf-8')).hexdigest()
    def _get_next_version(self, model_instance):
        # Unsaved instance: always increment (there is no snapshot to compare).
        if not model_instance.pk:
            return int(getattr(model_instance, self.attname) + 1)
        old = model_instance._concurrencymeta.initial
        new = self._get_hash(model_instance)
        # Bump only when monitored values changed since the snapshot.
        if old != new:
            return int(getattr(model_instance, self.attname, 0) + 1)
        return int(getattr(model_instance, self.attname, 0))
| mit | 076eb479702ce82439555cb6ec028c33 | 37.940678 | 116 | 0.61741 | 4.269124 | false | false | false | false |
saxix/django-concurrency | tests/demoapp/demo/migrations/0001_initial.py | 1 | 11522 | # Generated by Django 1.9.6 on 2016-09-09 15:41
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import concurrency.fields
class Migration(migrations.Migration):
    """Initial, auto-generated schema for the demo app.

    Creates one model per concurrency field flavour (integer, auto-increment,
    trigger, conditional), plus inherited/proxy variants used by the test
    suite. Operation order matters (later operations reference earlier
    models); do not reorder by hand.
    """
    initial = True
    dependencies = [
        ('auth', '0002_concurrency_add_version_to_group'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Anything',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=10)),
            ],
        ),
        migrations.CreateModel(
            name='AutoIncConcurrentModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.AutoIncVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
                ('username', models.CharField(blank=True, max_length=30, null=True)),
                ('date_field', models.DateField(blank=True, null=True)),
            ],
            options={
                'verbose_name_plural': 'AutoIncConcurrentModel',
                'verbose_name': 'AutoIncConcurrentModel',
            },
        ),
        migrations.CreateModel(
            name='ConcreteModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.IntegerVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
                ('username', models.CharField(blank=True, max_length=30, null=True, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='ConditionalVersionModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.ConditionalVersionField(default=1, help_text='record revision number')),
                ('field1', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('field2', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('field3', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='ConditionalVersionModelSelfRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.ConditionalVersionField(default=1, help_text='record revision number')),
                ('name', models.CharField(max_length=10)),
            ],
        ),
        migrations.CreateModel(
            name='ConditionalVersionModelWithoutMeta',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.ConditionalVersionField(default=1, help_text='record revision number')),
                ('field1', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('field2', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('field3', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('anythings', models.ManyToManyField(to='demo.Anything')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='DropTriggerConcurrentModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.TriggerVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
                ('username', models.CharField(blank=True, max_length=30, null=True)),
                ('count', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='GroupTestModel',
            fields=[
                ('group_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='auth.Group')),
                ('username', models.CharField(max_length=50, verbose_name='username')),
            ],
            bases=('auth.group',),
        ),
        migrations.CreateModel(
            name='Issue3TestModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('username', models.CharField(blank=True, max_length=30, null=True)),
                ('last_name', models.CharField(blank=True, max_length=30, null=True)),
                ('char_field', models.CharField(blank=True, max_length=30, null=True)),
                ('date_field', models.DateField(blank=True, null=True)),
                ('version', models.CharField(blank=True, default='abc', max_length=10, null=True)),
                ('revision', concurrency.fields.IntegerVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
            ],
        ),
        migrations.CreateModel(
            name='ReversionConcurrentModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.IntegerVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
                ('username', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('date_field', models.DateField(blank=True, null=True)),
            ],
            options={
                'verbose_name_plural': 'Reversion-ConcurrentModels',
                'verbose_name': 'Reversion-ConcurrentModel',
            },
        ),
        migrations.CreateModel(
            name='SimpleConcurrentModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.IntegerVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
                ('username', models.CharField(blank=True, max_length=30, null=True, unique=True)),
                ('date_field', models.DateField(blank=True, null=True)),
            ],
            options={
                'verbose_name_plural': 'SimpleConcurrentModels',
                'verbose_name': 'SimpleConcurrentModel',
            },
        ),
        migrations.CreateModel(
            name='ThroughRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.ConditionalVersionField(default=1, help_text='record revision number')),
                ('left', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='demo.ConditionalVersionModelSelfRelation')),
                ('right', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='demo.ConditionalVersionModelSelfRelation')),
            ],
        ),
        migrations.CreateModel(
            name='TriggerConcurrentModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('version', concurrency.fields.TriggerVersionField(db_column='cm_version_id', default=1, help_text='record revision number')),
                ('username', models.CharField(blank=True, max_length=30, null=True)),
                ('count', models.IntegerField(default=0)),
            ],
            options={
                'verbose_name_plural': 'TriggerConcurrentModels',
                'verbose_name': 'TriggerConcurrentModel',
            },
        ),
        # Multi-table-inheritance children of SimpleConcurrentModel.
        migrations.CreateModel(
            name='ConcurrencyDisabledModel',
            fields=[
                ('simpleconcurrentmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='demo.SimpleConcurrentModel')),
                ('dummy_char', models.CharField(blank=True, max_length=30, null=True)),
            ],
            bases=('demo.simpleconcurrentmodel',),
        ),
        migrations.CreateModel(
            name='CustomSaveModel',
            fields=[
                ('simpleconcurrentmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='demo.SimpleConcurrentModel')),
                ('extra_field', models.CharField(blank=True, max_length=30, null=True, unique=True)),
            ],
            bases=('demo.simpleconcurrentmodel',),
        ),
        migrations.CreateModel(
            name='InheritedModel',
            fields=[
                ('simpleconcurrentmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='demo.SimpleConcurrentModel')),
                ('extra_field', models.CharField(blank=True, max_length=30, null=True, unique=True)),
            ],
            bases=('demo.simpleconcurrentmodel',),
        ),
        migrations.AddField(
            model_name='conditionalversionmodelselfrelation',
            name='relations',
            field=models.ManyToManyField(blank=True, through='demo.ThroughRelation', to='demo.ConditionalVersionModelSelfRelation'),
        ),
        migrations.AddField(
            model_name='anything',
            name='a_relation',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='demo.ConditionalVersionModelWithoutMeta'),
        ),
        # Proxy models over SimpleConcurrentModel (no extra tables).
        migrations.CreateModel(
            name='ListEditableConcurrentModel',
            fields=[
            ],
            options={
                'verbose_name_plural': 'ListEditableConcurrentModels',
                'verbose_name': 'ListEditableConcurrentModel',
                'proxy': True,
            },
            bases=('demo.simpleconcurrentmodel',),
        ),
        migrations.CreateModel(
            name='NoActionsConcurrentModel',
            fields=[
            ],
            options={
                'verbose_name_plural': 'NoActions-ConcurrentModels',
                'verbose_name': 'NoActions-ConcurrentModel',
                'proxy': True,
            },
            bases=('demo.simpleconcurrentmodel',),
        ),
        migrations.CreateModel(
            name='ProxyModel',
            fields=[
            ],
            options={
                'verbose_name_plural': 'ProxyModels',
                'verbose_name': 'ProxyModel',
                'proxy': True,
            },
            bases=('demo.simpleconcurrentmodel',),
        ),
    ]
| mit | a4ce8b273e1d116ba543d63b0ef9ab33 | 50.900901 | 218 | 0.585662 | 4.416251 | false | false | false | false |
whitesmith/hawkpost | humans/migrations/0001_initial.py | 1 | 3019 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-27 23:21
from __future__ import unicode_literals
import django.contrib.auth.models
import django.core.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial, auto-generated migration creating the custom User model.

    The model mirrors Django's stock ``AbstractUser`` field-for-field
    (username validators, staff/active flags, groups and permissions M2M)
    and installs the default ``UserManager``.
    """
    initial = True
    dependencies = [
        ('auth', '0007_alter_validators_add_error_messages'),
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, unique=True, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.')], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
| mit | 60a3518d1e25d8e98de0b1fef037c461 | 64.630435 | 421 | 0.641934 | 4.294452 | false | false | false | false |
pythonindia/junction | junction/conferences/management/commands/conference_moderator.py | 1 | 4433 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.core.management.base import BaseCommand, CommandError
from junction.conferences.models import Conference
# Permission codenames (grouped by app label) granted to conference moderators.
# Each entry must be unique: the previous version listed the
# *_conferencemoderator triple twice in "conferences".
APP_PERMISSIONS = {
    "conferences": [
        "change_conferencesetting",
        "add_conferencevenue",
        "change_conferencevenue",
        "delete_conferencevenue",
        "change_conference",
        "add_conferencemoderator",
        "change_conferencemoderator",
        "delete_conferencemoderator",
        "add_conferenceproposalreviewer",
        "change_conferenceproposalreviewer",
        "delete_conferenceproposalreviewer",
        "add_room",
        "change_room",
        "delete_room",
    ],
    "proposals": [
        "add_proposalsection",
        "change_proposalsection",
        "delete_proposalsection",
        "add_proposalsectionreviewer",
        "change_proposalsectionreviewer",
        "delete_proposalsectionreviewer",
        "add_proposaltype",
        "change_proposaltype",
        "delete_proposaltype",
        "change_proposal",
        "change_proposalvote",
        "add_proposalsectionreviewervotevalue",
        "change_proposalsectionreviewervotevalue",
        "add_proposalsectionreviewervote",
        "change_proposalsectionreviewervote",
        "add_proposalcomment",
        "change_proposalcomment",
        "add_proposalcommentvote",
        "change_proposalcommentvote",
    ],
    "schedule": [
        "add_scheduleitem",
        "change_scheduleitem",
        "delete_scheduleitem",
        "add_scheduleitemtype",
        "change_scheduleitemtype",
        "delete_scheduleitemtype",
    ],
    "devices": ["add_device", "change_device", "delete_device"],
    "feedback": [
        "add_textfeedbackquestion",
        "change_textfeedbackquestion",
        "delete_textfeedbackquestion",
        "add_choicefeedbackquestion",
        "change_choicefeedbackquestion",
        "delete_choicefeedbackquestion",
        "add_scheduleitemtextfeedback",
        "change_scheduleitemtextfeedback",
        "delete_scheduleitemtextfeedback",
        "add_scheduleitemchoicefeedback",
        "change_scheduleitemchoicefeedback",
        "delete_scheduleitemchoicefeedback",
        "add_choicefeedbackquestionvalue",
        "change_choicefeedbackquestionvalue",
        "delete_choicefeedbackquestionvalue",
    ],
    "tickets": ["add_ticket", "change_ticket", "delete_ticket"],
}
class Command(BaseCommand):
    """Grant conference-moderator permissions to a user.

    Usage: ``manage.py conference_moderator <slug> <email>``

    Verifies the conference exists, marks the user as active staff, and
    grants every permission listed in ``APP_PERMISSIONS``.
    """
    help = "Add conference moderator permissions to the given user."

    def add_arguments(self, parser):
        # Django's BaseCommand hook is ``add_arguments`` (plural); the old
        # ``add_argument`` spelling was never invoked, so the command used to
        # reject its positional parameters.
        parser.add_argument("slug", nargs=1, type=str)
        parser.add_argument("email", nargs=1, type=str)

    def has_conference(self, slug):
        """Return the conference for *slug*, or raise CommandError."""
        try:
            conference = Conference.objects.get(slug=slug)
            return conference
        except Conference.DoesNotExist:
            raise CommandError('Conference "{}" does not exist'.format(slug))

    def create_user(self, email):
        """Mark the user with *email* as active staff and return it."""
        Users = get_user_model()
        try:
            user = Users.objects.get(email=email)
            user.is_staff = True
            user.is_active = True
            user.save()
            return user
        except Users.DoesNotExist:
            # The old message formatted the unbound name ``user``, raising
            # NameError instead of the intended CommandError.
            raise CommandError("User '{}' doesn't exist".format(email))

    def add_permissions(self, user):
        """Grant every APP_PERMISSIONS entry the user does not already have."""
        for app_label, permissions in APP_PERMISSIONS.items():
            for perm in permissions:
                term = ".".join([app_label, perm])
                if user.has_perm(term):
                    print("User has perm: '{}'".format(term))
                else:
                    print("User doesn't have perm: '{}'".format(term))
                    # NOTE(review): codename alone may be ambiguous across
                    # apps; filtering by content type would be stricter.
                    permission = Permission.objects.get(codename=perm)
                    user.user_permissions.add(permission)
                    print("Added permission '{}'".format(permission))

    def handle(self, *args, **options):
        # argparse delivers each ``nargs=1`` value as a one-element list;
        # fall back to positional *args for direct callers of handle().
        slug = options["slug"][0] if options.get("slug") else (args[0] if len(args) > 0 else None)
        email = options["email"][0] if options.get("email") else (args[1] if len(args) > 1 else None)
        if not slug or not email:
            print("Two arguments are required")
            return
        # Check conference and short circuit if missing
        self.has_conference(slug=slug)
        # Check and create user
        user = self.create_user(email=email)
        # Add all models to user permission
        self.add_permissions(user=user)
| mit | 872392cf5a6555ee8dc78c573364b5b1 | 34.464 | 77 | 0.61561 | 4.371795 | false | false | false | false |
spectralpython/spectral | spectral/database/usgs.py | 1 | 23468 | '''
Code for reading and managing USGS spectral library data.
References:
Kokaly, R.F., Clark, R.N., Swayze, G.A., Livo, K.E., Hoefen, T.M., Pearson,
N.C., Wise, R.A., Benzel, W.M., Lowers, H.A., Driscoll, R.L., and Klein, A.J.,
2017, USGS Spectral Library Version 7: U.S. Geological Survey Data Series 1035,
61 p., https://doi.org/10.3133/ds1035.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
from spectral.utilities.python23 import IS_PYTHON3, tobytes, frombytes
from .spectral_database import SpectralDatabase
import re
import logging
import sqlite3
import array
# Py2/Py3 compatibility shims: USGS library files are Latin-1 encoded.
# On Python 3 the file is opened with an explicit iso-8859-1 encoding, so
# readline() already yields str; on Python 2 the file is opened in byte mode
# and each line is decoded on read instead.
if IS_PYTHON3:
    def readline(fin): return fin.readline()
    def open_file(filename): return open(filename, encoding='iso-8859-1')
else:
    def readline(fin): return fin.readline().decode('iso-8859-1')
    def open_file(filename): return open(filename)
# DDL for the two tables of the USGS sqlite database: one row per measured
# sample (Samples) and one per spectrometer axis/bandpass definition
# (SpectrometerData). ValuesArray stores the float32 spectrum as a BLOB;
# Samples.AssumedWLSpmeterDataID links a sample to its (heuristically
# matched) wavelength axis.
table_schemas = [
    'CREATE TABLE Samples (SampleID INTEGER PRIMARY KEY, LibName TEXT, Record INTEGER, '
    'Description TEXT, Spectrometer TEXT, Purity TEXT, MeasurementType TEXT, Chapter TEXT, FileName TEXT, '
    'AssumedWLSpmeterDataID INTEGER, '
    'NumValues INTEGER, MinValue FLOAT, MaxValue FLOAT, ValuesArray BLOB)',
    'CREATE TABLE SpectrometerData (SpectrometerDataID INTEGER PRIMARY KEY, LibName TEXT, '
    'Record INTEGER, MeasurementType TEXT, Unit TEXT, Name TEXT, Description TEXT, FileName TEXT, '
    'NumValues INTEGER, MinValue FLOAT, MaxValue FLOAT, ValuesArray BLOB)'
]
# array.array type code for 32-bit floats; chr(ord(...)) keeps it a native
# str on both Python 2 and 3 (equivalent to the literal 'f').
arraytypecode = chr(ord('f'))
def array_from_blob(blob):
    """Deserialize a SQLite BLOB back into a float32 ``array.array``."""
    values = array.array(arraytypecode)
    frombytes(values, blob)
    return values
def array_to_blob(arr):
    """Serialize a sequence of floats into a SQLite BLOB of float32 values."""
    packed = array.array(arraytypecode, arr)
    return sqlite3.Binary(tobytes(packed))
# Actually these are not all spectrometer names, but kind of it.
# Maps a canonical spectrometer name to the alternative spellings that may
# appear in USGS file headers; used by
# SpectrometerData._find_spectrometer_name via substring matching (order of
# the alternatives matters: longer/more specific spellings come first).
_spectrometer_names = {
    'ASD': ['ASD'],
    'ASTER': ['ASTER'],
    'AVIRIS': ['AVIRIS', 'aviris'],
    'BECK': ['BECK'],
    'CRISM JOINED MTR3': ['CRISM Bandpass(FWHM) JOINED MTR3', 'CRISM Waves JOINED MTR3', 'CRISM Bandpass JOINED MTR3', 'CRISM JOINED MTR3'],
    'CRISM GLOBAL': ['CRISM Bandpass(FWHM) GLOBAL', 'CRISM Wavelengths GLOBAL', 'CRISM Waves GLOBAL', 'CRISM GLOBAL'],
    'Hyperion': ['Hyperion'],
    'HyMap2': ['HyMap2'],
    'Landsat8': ['Landsat8'],
    'M3': ['M3'],
    'NIC4': ['NIC4'],
    'Sentinel2': ['Sentinel2', 'Sentinel-2'],
    'VIMS': ['VIMS'],
    'WorldView3': ['WorldView3']
}
class SpectrometerData:
    '''
    Holds data for spectrometer, from USGS spectral library: one measurement
    axis (wavelengths, bandpass, resolution, wavenumber or SRF) for a given
    spectrometer.
    '''

    def __init__(self, libname, record, measurement_type, unit, spectrometer_name,
                 description, file_name, values):
        self.libname = libname
        self.record = record
        self.measurement_type = measurement_type
        self.unit = unit
        self.spectrometer_name = spectrometer_name
        self.description = description
        self.file_name = file_name
        self.values = values

    def header(self):
        '''
        Returns:
            String representation of basic meta data.
        '''
        # Fixed: the format string has five placeholders but the old code
        # passed only four arguments and referenced the non-existent
        # ``self.measurement`` attribute, so header() always raised.
        return '{0} Record={1}: {2} {3} {4}'.format(self.libname, self.record,
                                                    self.measurement_type, self.unit,
                                                    self.description)

    @classmethod
    def read_from_file(cls, filename):
        '''
        Constructs SpectrometerData from file.

        Arguments:
            `filename` (str):
                Path to file containing data.

        Returns:
            A `SpectrometerData` constructed from data parsed from file.

        Raises:
            Exception, if the file is empty or its header cannot be parsed.
        '''
        import os
        logger = logging.getLogger('spectral')
        with open_file(filename) as f:
            header_line = readline(f)
            if not header_line:
                raise Exception(
                    '{0} has empty header line or no lines at all.'.format(filename))
            libname, record, measurement_type, unit, spectrometer_name, description = \
                SpectrometerData._parse_header(header_line.strip())
            # Remaining lines are one float value each.
            values = []
            for line in f:
                if not line:
                    break
                try:
                    values.append(float(line.strip()))
                except ValueError:
                    # Narrowed from a bare ``except``: only unparsable
                    # numbers are expected here.
                    logger.error('In file %s found unparsable line.', filename)
        file_name = os.path.basename(filename)
        return cls(libname, record, measurement_type, unit, spectrometer_name, description, file_name, values)

    @staticmethod
    def _find_spectrometer_name(header_line):
        # Map free-form header text onto a canonical spectrometer name via
        # the alternative-spellings table.
        for sname, alt_names in _spectrometer_names.items():
            for alt_name in alt_names:
                if alt_name in header_line:
                    return sname
        raise Exception(
            'Could not find spectrometer for header {0}'.format(header_line))

    @staticmethod
    def _assume_measurement_type(header_line):
        """Infer the measurement type from keywords in the header line."""
        header_line = header_line.lower()
        # The order of checking these things is important.
        if 'wavelength' in header_line or 'waves' in header_line:
            return 'Wavelengths'
        if 'bandpass' in header_line or 'fwhm' in header_line or 'bandwidths' in header_line:
            return 'Bandpass'
        if 'resolution' in header_line:
            return 'Resolution'
        if 'wavenumber' in header_line:
            return 'Wavenumber'
        if 'srf' in header_line:
            return 'SRF'
        raise Exception(
            'Could not assume measurement type for header line {0}'.format(header_line))

    @staticmethod
    def _assume_unit(header_line, measurement_type):
        """Infer the value unit from the header line and measurement type."""
        if measurement_type in ('Wavelengths', 'Bandpass', 'Resolution'):
            if re.search(r'\bnm\b', header_line) is not None:
                return 'nanometer'
            if 'nanometer' in header_line:
                return 'nanometer'
            # 'um', 'microns' are usually found in these files, but this is default
            # anyway.
            return 'micrometer'
        elif measurement_type == 'Wavenumber':
            return 'cm^-1'
        elif measurement_type == 'SRF':
            return 'none'
        else:
            return 'unknown'

    @staticmethod
    def _parse_header(header_line):
        """Split a header line into its (libname, record, type, unit,
        spectrometer, description) components."""
        # It is difficult to parse this data,
        # things are separated by spaces, but inside of what should be single datum,
        # there are spaces, so only human can get it right.
        elements = header_line.split()
        libname = elements[0]
        # From 'Record=1234:' extract 1234.
        record = int(elements[1].split('=')[1][:-1])
        # Join everything after record into description.
        description = ' '.join(elements[2:])
        measurement_type = SpectrometerData._assume_measurement_type(
            header_line)
        unit = SpectrometerData._assume_unit(header_line, measurement_type)
        spectrometer_name = SpectrometerData._find_spectrometer_name(
            header_line)
        return libname, record, measurement_type, unit, spectrometer_name, description
class SampleData:
    '''
    Holds parsed data for single sample from USGS spectral library:
    one measured spectrum plus its header metadata (library name, record
    number, spectrometer, sample purity and measurement type).
    '''
    def __init__(self, libname=None, record=None, description=None, spectrometer=None,
                 purity=None, measurement_type=None, chapter=None, file_name=None, values=None):
        self.libname = libname
        self.record = record
        self.description = description
        self.spectrometer = spectrometer
        self.purity = purity
        self.measurement_type = measurement_type
        self.chapter = chapter
        self.file_name = file_name
        self.values = values
    def header(self):
        '''
        Returns:
            String representation of basic meta data.
        '''
        return '{0} Record={1}: {2} {3}{4} {5}'.format(self.libname, self.record,
                                                       self.description, self.spectrometer,
                                                       self.purity, self.measurement_type)
    @staticmethod
    def _parse_header(header_line):
        # Header layout: '<libname> Record=<n>: <description...> <SPMETERpurity> <TYPE>'
        elements = header_line.split()
        libname = elements[0]
        # From 'Record=1234:' extract 1234.
        record = int(elements[1].split('=')[1][:-1])
        # Join everything after record into description.
        description = ' '.join(elements[2:])
        # Split 'AVIRIS13aa' into ['', 'AVIRIS13', 'aa', ''].
        smpurity = re.split('([A-Z0-9]+)([a-z]+)', elements[-2])
        # There is a case with capital letters like 'NIC4AA';
        # re.split returns the input unchanged (length 1) when the first
        # pattern does not match, so retry with an all-caps purity pattern.
        if len(smpurity) == 1:
            smpurity = re.split('([A-Z]+[0-9])([A-Z]+)', elements[-2])
            smpurity[2] = smpurity[2].lower()
        spectrometer = smpurity[1]
        purity = smpurity[2]
        measurement_type = elements[-1]
        return libname, record, description, spectrometer, purity, measurement_type
    @classmethod
    def read_from_file(cls, filename, chapter=None):
        '''
        Constructs SampleData from file.

        Arguments:
            `filename` (str):
                Path to file containing data.
            `chapter` (str, optional):
                Chapter name the sample belongs to (stored as-is).

        Returns:
            A `SampleData` constructed from data parsed from file.
        '''
        import os
        logger = logging.getLogger('spectral')
        # NOTE(review): unlike SpectrometerData.read_from_file this uses the
        # plain built-in open() without the iso-8859-1 shim — presumably
        # sample files are ASCII-clean; confirm before changing.
        with open(filename) as f:
            header_line = f.readline()
            if not header_line:
                raise Exception(
                    '{0} has empty header line or no lines at all.'.format(filename))
            libname, record, description, spectrometer, purity, measurement_type = \
                SampleData._parse_header(header_line.strip())
            # Remaining lines are one float value each; unparsable lines are
            # logged and skipped.
            values = []
            for line in f:
                if not line:
                    break
                try:
                    values.append(float(line.strip()))
                except:
                    logger.error('In file %s found unparsable line.', filename)
        file_name = os.path.basename(filename)
        return cls(libname, record, description, spectrometer, purity,
                   measurement_type, chapter, file_name, values)
class USGSDatabase(SpectralDatabase):
'''A relational database to manage USGS spectral library data.'''
schemas = table_schemas
    def _assume_wavelength_spectrometer_data_id(self, sampleData):
        """Heuristically find the SpectrometerData row holding the wavelength
        axis for *sampleData*, trying progressively looser matches:
        exact spectrometer name, then name prefix, then library+length only.
        Raises Exception when no candidate row exists."""
        # We can't know this for sure, but these heuristics haven't failed so far.
        # Prepare parameters.
        # These parameters are mandatory to match.
        libname = sampleData.libname
        num_values = len(sampleData.values)
        # Spectrometer might not match in subdirectories where data is convolved
        # or resampled. In other directories, without spectrometer there is
        # few possible choices, so spectrometer isolates the one we need.
        spectrometer = sampleData.spectrometer
        logger = logging.getLogger('spectral')
        # Start with the most specific.
        query = '''SELECT SpectrometerDataID FROM SpectrometerData WHERE
                    MeasurementType = 'Wavelengths' AND LibName = ? AND NumValues = ?
                    AND Name = ?'''
        result = self.cursor.execute(
            query, (libname, num_values, spectrometer))
        rows = result.fetchall()
        if len(rows) == 0:
            # No exact name match: retry with a 3-character prefix match.
            query = '''SELECT SpectrometerDataID FROM SpectrometerData WHERE
                MeasurementType = 'Wavelengths' AND LibName = ? AND NumValues = ?
                AND Name LIKE ?'''
            result = self.cursor.execute(
                # ASDFR -> ASD, and '%' just to be sure.
                query, (libname, num_values, spectrometer[:3] + '%'))
            rows = result.fetchall()
        if len(rows) >= 1:
            if len(rows) > 1:
                logger.warning('Found multiple spectrometers with measurement_type Wavelengths, '
                               ' LibName %s, NumValues %d and Name %s', libname, num_values, spectrometer)
            # Ambiguity is tolerated: the first candidate is used.
            return rows[0][0]
        # Try to be less specific without spectrometer name.
        query = '''SELECT SpectrometerDataID FROM SpectrometerData WHERE
                    MeasurementType = 'Wavelengths' AND LibName = ? AND NumValues = ?'''
        result = self.cursor.execute(query, (libname, num_values))
        rows = result.fetchall()
        if len(rows) < 1:
            raise Exception('Wavelengths for spectrometer not found, for LibName = {0} and NumValues = {1}, from file {2}'.format(
                libname, num_values, sampleData.file_name))
        if len(rows) > 1:
            logger.warning('Found multiple spectrometers with measurement_type Wavelengths, '
                           ' LibName %s and NumValues %d, from file %s', libname, num_values, sampleData.file_name)
        return rows[0][0]
    def _add_sample_data(self, spdata):
        """Insert a SampleData row into the Samples table (spectrum stored as
        a float32 BLOB, with min/max/count denormalized for querying) and
        return the new row id. Commits the transaction."""
        sql = '''INSERT INTO Samples (LibName, Record,
            Description, Spectrometer, Purity, MeasurementType, Chapter, FileName,
            AssumedWLSpmeterDataID,
            NumValues, MinValue, MaxValue, ValuesArray)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
        values = array_to_blob(spdata.values)
        num_values = len(spdata.values)
        min_value = min(spdata.values)
        max_value = max(spdata.values)
        # Link the sample to its (heuristically matched) wavelength axis.
        assumedWLSpmeterDataID = self._assume_wavelength_spectrometer_data_id(spdata)
        self.cursor.execute(sql, (spdata.libname, spdata.record, spdata.description,
                                  spdata.spectrometer, spdata.purity, spdata.measurement_type,
                                  spdata.chapter, spdata.file_name, assumedWLSpmeterDataID,
                                  num_values, min_value, max_value, values))
        rowId = self.cursor.lastrowid
        self.db.commit()
        return rowId
def _add_spectrometer_data(self, spdata):
sql = '''INSERT INTO SpectrometerData (LibName, Record, MeasurementType, Unit,
Name, Description, FileName, NumValues, MinValue, MaxValue, ValuesArray)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
values = array_to_blob(spdata.values)
num_values = len(spdata.values)
min_value = min(spdata.values)
max_value = max(spdata.values)
self.cursor.execute(
sql, (spdata.libname, spdata.record, spdata.measurement_type, spdata.unit,
spdata.spectrometer_name, spdata.description,
spdata.file_name, num_values, min_value, max_value, values))
rowId = self.cursor.lastrowid
self.db.commit()
return rowId
    @classmethod
    def create(cls, filename, usgs_data_dir=None):
        '''Creates an USGS relational database by parsing USGS data files.

        Arguments:

            `filename` (str):

                Name of the new sqlite database file to create.

            `usgs_data_dir` (str):

                Path to the USGS ASCII data directory. This directory should
                contain subdirectories, which containes chapter directories.
                E.g. if provided `usgs_data_dir` is '/home/user/usgs/ASCIIdata',
                then relative path to single sample could be
                'ASCIIdata_splib07b/ChapterL_Liquids/splib07b_H2O-Ice_GDS136_77K_BECKa_AREF.txt'
                If this argument is not provided, no data will be imported.

        Returns:

            An :class:`~spectral.database.USGSDatabase` object.

        Example::

            >>> USGSDatabase.create("usgs_lib.db", "/home/user/usgs/ASCIIdata")

        This is a class method (it does not require instantiating an
        USGSDatabase object) that creates a new database by parsing files in the
        USGS library ASCIIdata directory. Normally, this should only
        need to be called once. Subsequently, a corresponding database object
        can be created by instantiating a new USGSDatabase object with the
        path the database file as its argument. For example::

            >>> from spectral.database.usgs import USGSDatabase
            >>> db = USGSDatabase("usgs_lib.db")
        '''
        import os
        # Refuse to clobber an existing database file.
        if os.path.isfile(filename):
            raise Exception('Error: Specified file already exists.')
        db = cls()
        db._connect(filename)
        # Create the (empty) tables defined by the class-level schemas.
        for schema in cls.schemas:
            db.cursor.execute(schema)
        # Importing data is optional; an empty database is still usable.
        if usgs_data_dir:
            db._import_files(usgs_data_dir)
        return db
def __init__(self, sqlite_filename=None):
'''Creates a database object to interface an existing database.
Arguments:
`sqlite_filename` (str):
Name of the database file. If this argument is not provided,
an interface to a database file will not be established.
Returns:
An :class:`~spectral.USGSDatabase` connected to the database.
'''
from spectral.io.spyfile import find_file_path
if sqlite_filename:
self._connect(find_file_path(sqlite_filename))
else:
self.db = None
self.cursor = None
    def _import_files(self, data_dir):
        '''Walks `data_dir` and imports all spectrometer and sample files.

        Expects sub-library directories directly under `data_dir`, with
        spectrometer *.txt files at the sub-library level and per-sample
        *.txt files inside chapter subdirectories. Failures on individual
        files are logged and counted but do not abort the import.
        '''
        from glob import glob
        import numpy
        import os

        logger = logging.getLogger('spectral')

        if not os.path.isdir(data_dir):
            raise Exception('Error: Invalid directory name specified.')

        num_sample_files = 0
        num_spectrometer_files = 0
        num_failed_sample_files = 0
        num_failed_spectromter_files = 0

        for sublib in os.listdir(data_dir):
            sublib_dir = os.path.join(data_dir, sublib)
            if not os.path.isdir(sublib_dir):
                continue

            # Process instrument data one by one.
            for f in glob(sublib_dir + '/*.txt'):
                logger.info('Importing spectrometer file %s', f)
                try:
                    spdata = SpectrometerData.read_from_file(f)
                    self._add_spectrometer_data(spdata)
                    num_spectrometer_files += 1
                except Exception as e:
                    # A bad file should not abort the whole import; count
                    # it and carry on with the remaining files.
                    logger.error('Failed to import spectrometer file %s', f)
                    logger.error(e)
                    num_failed_spectromter_files += 1

            # Go into each chapter directory and process individual samples.
            for chapter in os.listdir(sublib_dir):
                # Skip errorbars directory. Maybe add support for parsing it later.
                if chapter == 'errorbars':
                    continue
                chapter_dir = os.path.join(sublib_dir, chapter)
                if not os.path.isdir(chapter_dir):
                    continue
                for f in glob(chapter_dir + '/*.txt'):
                    logger.info('Importing sample file %s', f)
                    try:
                        spdata = SampleData.read_from_file(f, chapter)
                        self._add_sample_data(spdata)
                        num_sample_files += 1
                    except Exception as e:
                        logger.error(
                            'Failed to import sample file %s', f)
                        logger.error(e)
                        num_failed_sample_files += 1

        logger.info('Imported %d sample files and %d spectrometer files. '
                    '%d failed sample files, and %d failed spectrometer files.',
                    num_sample_files, num_spectrometer_files, num_failed_sample_files,
                    num_failed_spectromter_files)
def get_spectrum(self, sampleID):
'''Returns a spectrum from the database.
Usage:
(x, y) = usgs.get_spectrum(sampleID)
Arguments:
`sampleID` (int):
The **SampleID** value for the desired spectrum from the
**Samples** table in the database.
Returns:
`x` (list):
Band centers for the spectrum.
This is extraced from assumed spectrometer for given sample.
`y` (list):
Spectrum data values for each band.
Returns a pair of vectors containing the wavelengths and measured
values values of a measurment.
'''
import array
query = '''SELECT ValuesArray, AssumedWLSpmeterDataID FROM Samples WHERE SampleID = ?'''
result = self.cursor.execute(query, (sampleID,))
rows = result.fetchall()
if len(rows) < 1:
raise Exception('Measurement record not found.')
y = array_from_blob(rows[0][0])
assumedWLSpmeterDataID = rows[0][1]
query = '''SELECT ValuesArray FROM SpectrometerData WHERE SpectrometerDataID = ?'''
result = self.cursor.execute(
query, (assumedWLSpmeterDataID,))
rows = result.fetchall()
if len(rows) < 1:
raise Exception('Measurement (wavelengths) record not found.')
x = array_from_blob(rows[0][0])
return (list(x), list(y))
    def create_envi_spectral_library(self, spectrumIDs, bandInfo):
        '''Creates an ENVI-formatted spectral library for a list of spectra.

        Arguments:

            `spectrumIDs` (list of ints):

                List of **SampleID** values for of spectra in the "Samples"
                table of the USGS database.

            `bandInfo` (:class:`~spectral.BandInfo`):

                The spectral bands to which the original USGS library spectra
                will be resampled.

        Returns:

            A :class:`~spectral.io.envi.SpectralLibrary` object.

        The IDs passed to the method should correspond to the SampleID field
        of the USGS database "Samples" table. All specified spectra will be
        resampled to the same discretization specified by the bandInfo
        parameter. See :class:`spectral.BandResampler` for details on the
        resampling method used.

        Note that expected units for bands are micrometers.
        '''
        from spectral.algorithms.resampling import BandResampler
        from spectral.io.envi import SpectralLibrary
        import numpy
        import unicodedata
        spectra = numpy.empty((len(spectrumIDs), len(bandInfo.centers)))
        # Join each sample with the wavelength array of its assumed
        # spectrometer so the reflectance values (a) and band centers (b)
        # come back together in a single row.
        cursor = self.cursor.execute('''
                    SELECT a.ValuesArray, b.ValuesArray, a.Description, b.Unit
                        FROM Samples AS a INNER JOIN SpectrometerData AS b
                        ON a.AssumedWLSpmeterDataID = b.SpectrometerDataID
                        WHERE a.SampleID IN ({0})'''.format(','.join(['?']*len(spectrumIDs))),
                        spectrumIDs)
        names = []
        for i, s in enumerate(cursor):
            y = array_from_blob(s[0])
            x = array_from_blob(s[1])
            name = s[2]
            unit = s[3]
            # Resampling expects micrometers; convert the wavelength axis
            # in place if it is stored in nanometers.
            if unit == 'nanometers':
                x /= 1000
            resample = BandResampler(
                x, bandInfo.centers, None, bandInfo.bandwidths)
            spectra[i] = resample(y)
            # NOTE(review): .encode() yields `bytes` under Python 3 —
            # confirm SpectralLibrary accepts bytes for 'spectra names'.
            names.append(unicodedata.normalize('NFKD', name).
                         encode('ascii', 'ignore'))
        header = {}
        header['wavelength units'] = 'um'
        header['spectra names'] = names
        header['wavelength'] = bandInfo.centers
        header['fwhm'] = bandInfo.bandwidths
        return SpectralLibrary(spectra, header, {})
| mit | 5d0a4915eaf7bd1857ee2749ebeed8b8 | 37.854305 | 140 | 0.579981 | 4.106387 | false | false | false | false |
pythonindia/junction | junction/base/emailer.py | 1 | 1234 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from os import path
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
def send_email(to, context, template_dir):
    """Render the templates in `template_dir` and email the result to `to`.

    :param to: User object to send email to..
    :param context: dict containing which needs to be passed to django template
    :param template_dir: We expect files message.txt, subject.txt,
    message.html etc in this folder.
    :returns: None
    :rtype: None
    """

    def render(template_name):
        # Each part of the email lives in its own template file.
        template_path = path.join(template_dir, template_name)
        return render_to_string(template_path, context).strip()

    return send_mail(
        render("subject.txt"),
        render("message.txt"),
        settings.DEFAULT_FROM_EMAIL,
        [_format_email(to)],
        html_message=render("message.html"),
    )
def _format_email(user):
return (
user.email
if user.first_name and user.last_name
else '"{} {}" <{}>'.format(user.first_name, user.last_name, user.email)
)
| mit | 7d5eee4c238564111ebf37e054bde373 | 28.380952 | 88 | 0.672609 | 3.661721 | false | false | false | false |
spectralpython/spectral | spectral/tests/database.py | 1 | 6646 | '''
Runs unit tests of functions associated with spectral databases.
To run the unit tests, type the following from the system command line:
# python -m spectral.tests.database
Note that the ECOSTRESS database must be requested so if the data files are
not located on the local file system, these tests will be skipped.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import os
from numpy.testing import assert_almost_equal
import spectral as spy
from spectral.io.aviris import read_aviris_bands
from spectral.tests import testdir
from spectral.tests.spytest import SpyTest
# Locations of the sample data bundled with the test suite, and the sqlite
# database files that the creation tests write under `testdir`.
ECOSTRESS_DATA_DIR = os.path.join(os.path.split(__file__)[0],
                                  'data/ecostress')
ECOSTRESS_DB = os.path.join(testdir, 'ecostress.db')
USGS_DATA_DIR = os.path.join(os.path.split(__file__)[0],
                             'data/usgs/ASCIIdata')
USGS_DB = os.path.join(testdir, 'usgs.db')
AVIRIS_BAND_FILE = os.path.join(os.path.split(__file__)[0], 'data/92AV3C.spc')
class ECOSTRESSDatabaseCreationTest(SpyTest):
    '''Verifies that an ECOSTRESS database can be built from ASCII files.'''

    def __init__(self):
        pass

    def setup(self):
        # Start from a clean slate: make sure the output directory exists
        # and no stale database file is left from a previous run.
        if not os.path.isdir(testdir):
            os.makedirs(testdir)
        if os.path.exists(ECOSTRESS_DB):
            os.remove(ECOSTRESS_DB)

    def test_create_database(self):
        '''A freshly created database should contain all 3 test spectra.'''
        db = spy.EcostressDatabase.create(ECOSTRESS_DB,
                                          ECOSTRESS_DATA_DIR)
        count = list(db.query('SELECT COUNT() FROM Spectra'))[0][0]
        assert count == 3
class ECOSTRESSDatabaseTest(SpyTest):
    '''Exercises queries against an existing ECOSTRESS database.'''

    def __init__(self):
        pass

    def setup(self):
        self.db = spy.EcostressDatabase(ECOSTRESS_DB)

    def test_read_signatures(self):
        '''Can get spectra from the opened database.'''
        count = list(self.db.query('SELECT COUNT() FROM Spectra'))[0][0]
        assert count == 3

    def test_create_envi_lib(self):
        '''Can resample spectra and create an ENVI spectral library.'''
        bands = read_aviris_bands(AVIRIS_BAND_FILE)
        ids = [row[0] for row in
               self.db.query('SELECT SpectrumID FROM Spectra')]
        # Convert band centers/widths from nanometers to micrometers.
        bands.centers = [c / 1000. for c in bands.centers]
        bands.bandwidths = [w / 1000. for w in bands.bandwidths]
        library = self.db.create_envi_spectral_library(ids, bands)
        assert library.spectra.shape == (3, 220)
class USGSDatabaseCreationTest(SpyTest):
    '''Verifies that a USGS database can be built from ASCII data files.'''

    def __init__(self):
        pass

    def setup(self):
        # Ensure the output directory exists and remove any stale database.
        if not os.path.isdir(testdir):
            os.makedirs(testdir)
        if os.path.exists(USGS_DB):
            os.remove(USGS_DB)

    def test_create_database(self):
        '''The new database should hold 8 samples and 13 spectrometer rows.'''
        db = spy.USGSDatabase.create(USGS_DB, USGS_DATA_DIR)
        assert list(db.query('SELECT COUNT() FROM Samples'))[0][0] == 8
        assert list(db.query('SELECT COUNT() FROM SpectrometerData'))[0][0] == 13
class USGSDatabaseTest(SpyTest):
    '''Tests that USGS database works properly'''
    def __init__(self):
        pass

    def setup(self):
        self.db = spy.USGSDatabase(USGS_DB)

    def test_read_signatures(self):
        '''Can get spectra from the opened database.'''
        assert(list(self.db.query('SELECT COUNT() FROM Samples'))[0][0] == 8)
        assert(list(self.db.query('SELECT COUNT() FROM SpectrometerData'))
               [0][0] == 13)

        # Fetch one specific sample row and verify every stored column.
        some_sample = list(self.db.query('''SELECT Chapter, FileName,
                            AssumedWLSpmeterDataID,
                            NumValues, MinValue, MaxValue
                            FROM Samples
                            WHERE LibName='liba' AND Record=1 AND
                            Description='Material a b0 0 ASDFRa AREF' AND
                            Spectrometer='ASDFR' AND Purity='a' AND MeasurementType='AREF'
                        '''))[0]
        assert(some_sample[0] == 'ChapterB_b0')
        assert(some_sample[1] == 'liba_Material_a_b0_0_ASDFRa_AREF.txt')
        assert(some_sample[3] == 24)
        assert_almost_equal(some_sample[4], 0.33387077)
        assert_almost_equal(some_sample[5], 0.51682192)

        # Follow the foreign key to the assumed spectrometer record and
        # verify its stored metadata as well.
        some_spectrometer_data = list(self.db.query('''SELECT LibName, Record, MeasurementType, Unit,
                            Name, Description, FileName, NumValues, MinValue, MaxValue
                            FROM SpectrometerData
                            WHERE SpectrometerDataID=?
                        ''', (some_sample[2],)))[0]
        assert(some_spectrometer_data[0] == 'liba')
        assert(some_spectrometer_data[1] == 13)
        assert(some_spectrometer_data[2] == 'Wavelengths')
        assert(some_spectrometer_data[3] == 'micrometer')
        assert(some_spectrometer_data[4] == 'ASD')
        assert(some_spectrometer_data[5] == 'Wavelengths ASD 0.35-2.5 um')
        assert(some_spectrometer_data[6] ==
               'liba_Wavelengths_ASD_0.35-2.5_um.txt')
        assert(some_spectrometer_data[7] == 24)
        assert_almost_equal(some_spectrometer_data[8], 0.35)
        assert_almost_equal(some_spectrometer_data[9], 2.5)

    def test_get_spectrum(self):
        # Retrieve one sample's (wavelengths, values) pair and spot-check
        # the first/last values of each array.
        some_sample_id = list(self.db.query('''SELECT SampleID
                            FROM Samples
                            WHERE LibName='libc' AND Description='Material D 2 AVIRISb RTGC'
                        '''))[0][0]
        (x, y) = self.db.get_spectrum(some_sample_id)
        assert(len(x) == len(y))
        assert(len(y) == 7)
        assert_almost_equal(y[0], 0.010381651)
        assert_almost_equal(x[-1], 2.2020326)

    def test_create_envi_lib(self):
        '''Can resample spectra and create an ENVI spectral library.'''
        bands = read_aviris_bands(AVIRIS_BAND_FILE)
        cursor = self.db.query('SELECT SampleID FROM Samples')
        ids = [r[0] for r in cursor]
        # AVIRIS band metadata is in nanometers; convert to micrometers.
        bands.centers = [x / 1000. for x in bands.centers]
        bands.bandwidths = [x / 1000. for x in bands.bandwidths]
        slib = self.db.create_envi_spectral_library(ids, bands)
        assert(slib.spectra.shape == (8, 220))
def run():
    '''Runs every database test suite in this module.'''
    print('\n' + '-' * 72)
    print('Running database tests.')
    print('-' * 72)
    suites = [ECOSTRESSDatabaseCreationTest, ECOSTRESSDatabaseTest,
              USGSDatabaseCreationTest, USGSDatabaseTest]
    for suite in suites:
        suite().run()
if __name__ == '__main__':
    # Allows running the suite directly: `python -m spectral.tests.database`
    from spectral.tests.run import parse_args, reset_stats, print_summary
    import logging
    # Suppress expected library warnings during the tests.
    logging.getLogger('spectral').setLevel(logging.ERROR)
    parse_args()
    reset_stats()
    run()
    print_summary()
| mit | 034fa493e040b8c680feb1060ccffe3b | 36.337079 | 112 | 0.616611 | 3.409954 | false | true | false | false |
spectralpython/spectral | spectral/algorithms/classifiers.py | 1 | 16400 | '''
Supervised classifiers and base class for all classifiers.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import math
import numpy
import numpy as np
from warnings import warn
import spectral as spy
from .algorithms import GaussianStats, ImageIterator
from .detectors import RX
from .perceptron import Perceptron
__all__ = ('GaussianClassifier', 'MahalanobisDistanceClassifier',
'PerceptronClassifier')
class Classifier(object):
    '''
    Base class for Classifiers.  Child classes must implement the
    classify_spectrum method.
    '''
    # It is often faster to compute the detector/classifier scores for the
    # entire image for each class, rather than for each class on a per-pixel
    # basis. However, this significantly increases memory requirements. If
    # the following parameter is True, class scores will be computed for the
    # entire image.
    cache_class_scores = True

    def __init__(self):
        pass

    def classify_spectrum(self, *args, **kwargs):
        raise NotImplementedError('Classifier.classify_spectrum must be '
                                  'overridden by a child class.')

    def classify_image(self, image):
        '''Classifies an entire image, returning a classification map.

        Arguments:

            `image` (ndarray or :class:`spectral.Image`)

                The `MxNxB` image to classify.

        Returns (ndarray):

            An `MxN` ndarray of integers specifying the class for each pixel.
        '''
        status = spy._status
        status.display_percentage('Classifying image...')
        it = ImageIterator(image)
        class_map = np.zeros(image.shape[:2], np.int16)
        N = it.get_num_elements()
        # BUG FIX: `N / 100` is a float under true division, so `i % inc`
        # was almost never exactly zero and the progress display updated
        # erratically (or not at all). Use a positive integer step instead.
        i, inc = (0, max(1, N // 100))
        for spectrum in it:
            class_map[it.row, it.col] = self.classify_spectrum(spectrum)
            i += 1
            if not i % inc:
                status.update_percentage(float(i) / N * 100.)
        status.end_percentage()
        return class_map

    def classify(self, X, **kwargs):
        '''Classifies a single spectrum (1-D input) or an entire image.'''
        if X.ndim == 1:
            return self.classify_spectrum(X, **kwargs)
        else:
            return self.classify_image(X, **kwargs)
class SupervisedClassifier(Classifier):
    '''Base class for classifiers trained on labeled data.

    Child classes override :meth:`train` to fit the classifier to a set of
    training classes before classification.
    '''
    def __init__(self):
        pass

    def train(self):
        pass
class GaussianClassifier(SupervisedClassifier):
    '''A Gaussian Maximum Likelihood Classifier'''
    def __init__(self, training_data=None, min_samples=None):
        '''Creates the classifier and optionally trains it with training data.

        Arguments:

            `training_data` (:class:`~spectral.algorithms.TrainingClassSet`):

                The training classes on which to train the classifier.

            `min_samples` (int) [default None]:

                Minimum number of samples required from a training class to
                include it in the classifier.

        '''
        if min_samples:
            self.min_samples = min_samples
        else:
            self.min_samples = None
        if training_data:
            self.train(training_data)

    def train(self, training_data):
        '''Trains the classifier on the given training data.

        Arguments:

            `training_data` (:class:`~spectral.algorithms.TrainingClassSet`):

                Data for the training classes.
        '''
        logger = logging.getLogger('spectral')
        if not self.min_samples:
            # Set minimum number of samples to the number of bands in the image
            self.min_samples = training_data.nbands
            logger.info('Setting min samples to %d', self.min_samples)
        self.classes = []
        for cl in training_data:
            if cl.size() >= self.min_samples:
                self.classes.append(cl)
            else:
                # BUG FIX: Logger.warn is a deprecated alias of warning.
                logger.warning('Omitting class %3d : only %d samples present',
                               cl.index, cl.size())
        for cl in self.classes:
            if not hasattr(cl, 'stats') or not cl.stats_valid():
                cl.calc_stats()

    def classify_spectrum(self, x):
        '''
        Classifies a pixel into one of the trained classes.

        Arguments:

            `x` (list or rank-1 ndarray):

                The unclassified spectrum.

        Returns:

            `classIndex` (int):

                The index for the :class:`~spectral.algorithms.TrainingClass`
                to which `x` is classified.
        '''
        scores = np.empty(len(self.classes))
        for (i, cl) in enumerate(self.classes):
            delta = (x - cl.stats.mean)
            # Log-likelihood (up to a constant) plus the log class prior.
            scores[i] = math.log(cl.class_prob) - 0.5 * cl.stats.log_det_cov \
                - 0.5 * delta.dot(cl.stats.inv_cov).dot(delta)
        return self.classes[np.argmax(scores)].index

    def classify_image(self, image):
        '''Classifies an entire image, returning a classification map.

        Arguments:

            `image` (ndarray or :class:`spectral.Image`)

                The `MxNxB` image to classify.

        Returns (ndarray):

            An `MxN` ndarray of integers specifying the class for each pixel.
        '''
        if not (self.cache_class_scores and isinstance(image, np.ndarray)):
            return super(GaussianClassifier, self).classify_image(image)

        status = spy._status
        status.display_percentage('Processing...')
        shape = image.shape
        image = image.reshape(-1, shape[-1])
        scores = np.empty((image.shape[0], len(self.classes)), np.float64)
        delta = np.empty_like(image, dtype=np.float64)

        # For some strange reason, creating Y with np.emtpy_like will sometimes
        # result in the following error when attempting an in-place np.dot:
        #     ValueError: output array is not acceptable (must have the right
        #     type, nr dimensions, and be a C-Array)
        # It appears that this may be happening when delta is not contiguous,
        # although it isn't clear why the alternate construction of Y below
        # does work.
        Y = np.empty_like(delta)

        for (i, c) in enumerate(self.classes):
            scalar = math.log(c.class_prob) - 0.5 * c.stats.log_det_cov
            delta = np.subtract(image, c.stats.mean, out=delta)
            try:
                Y = delta.dot(-0.5 * c.stats.inv_cov, out=Y)
            # BUG FIX: narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            except Exception:
                # Unable to output np.dot to existing array. Allocate new
                # storage instead. This will not affect results but may be
                # slower.
                Y = delta.dot(-0.5 * c.stats.inv_cov)
            scores[:, i] = np.einsum('ij,ij->i', Y, delta)
            scores[:, i] += scalar
            status.update_percentage(100. * (i + 1) / len(self.classes))
        status.end_percentage()
        inds = np.array([c.index for c in self.classes], dtype=np.int16)
        mins = np.argmax(scores, axis=-1)
        return inds[mins].reshape(shape[:2])
class MahalanobisDistanceClassifier(GaussianClassifier):
    '''A Classifier using Mahalanobis distance for class discrimination'''
    def train(self, trainingData):
        '''Trains the classifier on the given training data.

        Arguments:

            `trainingData` (:class:`~spectral.algorithms.TrainingClassSet`):

                Data for the training classes.
        '''
        GaussianClassifier.train(self, trainingData)

        # BUG FIX: `numpy.float` was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin `float` yields the same float64 dtype.
        covariance = numpy.zeros(self.classes[0].stats.cov.shape, float)
        # BUG FIX: np.sum over a generator is deprecated (and falls back to
        # Python iteration anyway); the builtin sum is the correct tool for
        # pooling the per-class sample counts.
        nsamples = sum(cl.stats.nsamples for cl in self.classes)
        # Pool the per-class covariances, weighted by sample count.
        for cl in self.classes:
            covariance += (cl.stats.nsamples / float(nsamples)) * cl.stats.cov
        self.background = GaussianStats(cov=covariance)

    def classify_spectrum(self, x):
        '''
        Classifies a pixel into one of the trained classes.

        Arguments:

            `x` (list or rank-1 ndarray):

                The unclassified spectrum.

        Returns:

            `classIndex` (int):

                The index for the :class:`~spectral.algorithms.TrainingClass`
                to which `x` is classified.
        '''
        scores = np.empty(len(self.classes))
        for (i, cl) in enumerate(self.classes):
            delta = (x - cl.stats.mean)
            # Mahalanobis distance using the pooled background covariance.
            scores[i] = delta.dot(self.background.inv_cov).dot(delta)
        return self.classes[np.argmin(scores)].index

    def classify_image(self, image):
        '''Classifies an entire image, returning a classification map.

        Arguments:

            `image` (ndarray or :class:`spectral.Image`)

                The `MxNxB` image to classify.

        Returns (ndarray):

            An `MxN` ndarray of integers specifying the class for each pixel.
        '''
        if not (self.cache_class_scores and isinstance(image, np.ndarray)):
            return super(MahalanobisDistanceClassifier,
                         self).classify_image(image)

        # We can cheat here and just compute RX scores for the image for each
        # class, keeping the background covariance constant and setting the
        # background mean to the mean of the particular class being evaluated.
        scores = np.empty(image.shape[:2] + (len(self.classes),), np.float64)
        status = spy._status
        status.display_percentage('Processing...')
        rx = RX()
        for (i, c) in enumerate(self.classes):
            self.background.mean = c.stats.mean
            rx.set_background(self.background)
            scores[:, :, i] = rx(image)
            status.update_percentage(100. * (i + 1) / len(self.classes))
        status.end_percentage()
        inds = np.array([c.index for c in self.classes], np.int16)
        mins = np.argmin(scores, axis=-1)
        return inds[mins]
class PerceptronClassifier(Perceptron, SupervisedClassifier):
    '''A multi-layer perceptron classifier with backpropagation learning.

    Multi-layer perceptrons often require many (i.e., thousands) of iterations
    through the traning data to converge on a solution. Therefore, it is not
    recommended to attempt training a network on full-dimensional hyperspectral
    data or even on a full set of image pixels. It is likely preferable to
    first train the network on a subset of the data, then retrain the network
    (starting with network weights from initial training) on the full data
    set.

    Example usage: Train an MLP with 20 samples from each training class after
    performing dimensionality reduction:

        >>> classes = create_training_classes(data, gt)
        >>> fld = linear_discriminant(classes)
        >>> xdata = fld.transform(data)
        >>> classes = create_training_classes(xdata, gt)
        >>> nfeatures = xdata.shape[-1]
        >>> nclasses = len(classes)
        >>>
        >>> p = PerceptronClassifier([nfeatures, 20, 8, nclasses])
        >>> p.train(classes, 20, clip=0., accuracy=100., batch=1,
        >>>         momentum=0.3, rate=0.3)
        >>> c = p.classify(xdata)
    '''
    def train(self, training_data, samples_per_class=0, *args, **kwargs):
        '''Trains the Perceptron on the training data.

        Arguments:

            `training_data` (:class:`~spectral.TrainingClassSet`):

                Data for the training classes.

            `samples_per_class` (int):

                Maximum number of training observations to user from each
                class in `training_data`. If this argument is not provided,
                all training data is used.

        Keyword Arguments:

            `accuracy` (float):

                The percent training accuracy at which to terminate training, if
                the maximum number of iterations are not reached first. This
                value can be set greater than 100 to force a specified number of
                training iterations to be performed (e.g., to continue reducing
                the error term after 100% classification accuracy has been
                achieved.

            `rate` (float):

                The perceptron learning rate (typically in the range (0, 1]).

            `momentum` (float):

                The perceptron learning momentum term, which specifies the
                fraction of the previous update value that should be added to
                the current update term. The value should be in the range [0, 1).

            `batch` (positive integer):

                Specifies how many samples should be evaluated before an update
                is made to the perceptron weights. A value of 0 indicates batch
                updates should be performed (evaluate all training inputs prior
                to updating). Otherwise, updates will be aggregated for every
                `batch` inputs (i.e., `batch` == 1 is stochastic learning).

            `clip` (float >= 0):

                Optional clipping value to limit sigmoid output during training.
                The sigmoid function has output in the range (0, 1). If the
                `clip` argument is set to `a` then all neuron outputs for the
                layer will be constrained to the range [a, 1 - a]. This can
                improve perceptron learning rate in some situations.

                After training the perceptron with a clipping value, `train` can
                be called again with clipping set to 0 to continue reducing the
                training error.

            `on_iteration` (callable):

                A callable object that accepts the perceptron as input and
                returns bool. If this argument is set, the object will be called
                at the end of each training iteration with the perceptron as its
                argument. If the callable returns True, training will terminate.

            `stdout`:

                An object with a `write` method that can be set to redirect
                training status messages somewhere other than stdout. To
                suppress output, set `stdout` to None.

        Return value:

            Returns True if desired accuracy was achieved.

        Neural networks can require many iterations through a data set to
        converge. If convergence slows (as indicated by small changes in
        residual error), training can be terminated by pressing CTRL-C, which
        will preserve the network weights from the previous training iteration.
        `train` can then be called again with altered training parameters
        (e.g., increased learning rate or momentum) to increase the convergence
        rate.
        '''
        status = spy._status
        settings = spy.settings

        # Number of Perceptron inputs must equal number of features in the
        # training data.
        if len(training_data) != self.layers[-1].shape[0]:
            raise Exception('Number of nodes in output layer must match '
                            'number of training classes.')
        self.training_data = training_data

        # Map output nodes to class indices
        self.indices = [cl.index for cl in self.training_data]

        # Gather the observations of each class, optionally truncated to
        # `samples_per_class` rows per class.
        class_data = [np.array([x for x in cl]) for cl in self.training_data]
        if samples_per_class > 0:
            for i in range(len(class_data)):
                if class_data[i].shape[0] > samples_per_class:
                    class_data[i] = class_data[i][:samples_per_class]
        # X: stacked observations; y: per-row class labels; Y: one-hot
        # encoding of y for the network's output layer.
        X = np.vstack(class_data)
        y = np.hstack([np.ones(c.shape[0], dtype=np.int16) * i for \
                      (i, c) in enumerate(class_data)])
        Y = np.eye(np.max(y) + 1, dtype=np.int16)[y]

        # Route progress output: explicit kwarg wins, else honor the global
        # show_progress setting.
        if 'stdout' in kwargs:
            stdout = kwargs.pop('stdout')
        elif settings.show_progress is True:
            stdout = status
        else:
            stdout = None
        return Perceptron.train(self, X, Y, *args, stdout=stdout, **kwargs)

    def classify_spectrum(self, x):
        '''
        Classifies a pixel into one of the trained classes.

        Arguments:

            `x` (list or rank-1 ndarray):

                The unclassified spectrum.

        Returns:

            `classIndex` (int):

                The index for the :class:`~spectral.TrainingClass`
                to which `x` is classified.
        '''
        # Feed the spectrum forward and pick the most activated output node.
        y = self.input(x)
        return self.indices[np.argmax(y)]

    def classify(self, X, **kwargs):
        return Classifier.classify(self, X, **kwargs)
| mit | 8b6569df9b0a087b0393d8cb947f3c5b | 35.853933 | 82 | 0.599024 | 4.331749 | false | false | false | false |
pythonindia/junction | junction/schedule/migrations/0003_scheduleitemtype.py | 1 | 2711 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
# Default schedule item types seeded into every deployment.
SCHEDULE_ITEM_TYPES = ["Talk", "Lunch", "Break", "Workshop", "Poster", "Open Space"]


def load_fixture(apps, schema_editor):
    """Insert one ScheduleItemType row per default item type."""
    item_type_model = apps.get_model("schedule", "ScheduleItemType")
    for title in SCHEDULE_ITEM_TYPES:
        item_type_model.objects.create(title=title)
def unload_fixture(apps, schema_editor):
    """Delete every ScheduleItemType row created by ``load_fixture``."""
    item_type_model = apps.get_model("schedule", "ScheduleItemType")
    for title in SCHEDULE_ITEM_TYPES:
        for existing in item_type_model.objects.filter(title=title):
            existing.delete()
class Migration(migrations.Migration):
    # Creates the ScheduleItemType table and seeds it with the default item
    # types via load_fixture (reversed by unload_fixture). The created_*/
    # modified_* columns mirror the audit-field convention used by the
    # project's other models.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("schedule", "0002_auto_20150831_0043"),
    ]

    operations = [
        migrations.CreateModel(
            name="ScheduleItemType",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "created_at",
                    models.DateTimeField(auto_now_add=True, verbose_name="Created At"),
                ),
                (
                    "modified_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                ("title", models.CharField(max_length=100)),
                (
                    "created_by",
                    models.ForeignKey(
                        related_name="created_scheduleitemtype_set",
                        verbose_name="Created By",
                        blank=True,
                        on_delete=models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                        null=True,
                    ),
                ),
                (
                    "modified_by",
                    models.ForeignKey(
                        related_name="updated_scheduleitemtype_set",
                        verbose_name="Modified By",
                        blank=True,
                        on_delete=models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                        null=True,
                    ),
                ),
            ],
            options={"abstract": False},
            bases=(models.Model,),
        ),
        migrations.RunPython(load_fixture, reverse_code=unload_fixture),
    ]
| mit | b7545ec2ac2e1095c3abe6670c59889f | 32.469136 | 87 | 0.459609 | 4.94708 | false | false | false | false |
spectralpython/spectral | spectral/algorithms/resampling.py | 1 | 7807 | '''
Functions for resampling a spectrum from one band discretization to another.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import math
import numpy as np
from ..spectral import BandInfo
def erf_local(x):
    '''Abramowitz & Stegun formula 7.1.26 approximation to erf(x).

    Fallback used only when neither `math.erf` nor `scipy.special.erf`
    is available. Maximum absolute error is about 1.5e-7.
    '''
    # erf is odd (erf(-x) = -erf(x)): evaluate at |x|, restore sign at end.
    sign = 1 if x >= 0 else -1
    x = abs(x)

    # Polynomial coefficients a1..a5 from A&S 7.1.26, highest order first.
    coeffs = (1.061405429, -1.453152027, 1.421413741, -0.284496736,
              0.254829592)
    p = 0.3275911

    t = 1.0/(1.0 + p*x)
    # Horner evaluation of a5*t**4 + a4*t**3 + a3*t**2 + a2*t + a1.
    poly = 0.0
    for c in coeffs:
        poly = poly*t + c
    y = 1.0 - poly*t*math.exp(-x*x)
    return sign*y
# Prefer the C implementation in the standard library (Python 3.2+); fall
# back to SciPy's erf, and finally to the local approximation defined above
# if neither can be imported.
try:
    from math import erf
except:
    try:
        from scipy.special import erf
    except:
        erf = erf_local
def erfc(z):
    '''Complementary error function: erfc(z) = 1 - erf(z).'''
    return 1.0 - erf(z)
def normal_cdf(x):
    '''Standard normal CDF, via the complementary error function:
    Phi(x) = erfc(-x / sqrt(2)) / 2.
    '''
    sqrt2 = 1.4142135623730951  # math.sqrt(2)
    return erfc(-x / sqrt2) * 0.5
def normal_integral(a, b):
    '''Probability mass of the standard normal distribution over [a, b].'''
    return normal_cdf(b) - normal_cdf(a)
def ranges_overlap(R1, R2):
    '''Returns True if the (lo, hi) intervals R1 and R2 share any point.'''
    # The intervals are disjoint only if R1 lies entirely below or
    # entirely above R2.
    entirely_below = R1[0] < R2[0] and R1[1] < R2[0]
    entirely_above = R1[0] > R2[1] and R1[1] > R2[1]
    return not (entirely_below or entirely_above)
def overlap(R1, R2):
    '''Returns the (lo, hi) intersection of intervals R1 and R2.'''
    lo = R1[0] if R1[0] > R2[0] else R2[0]
    hi = R1[1] if R1[1] < R2[1] else R2[1]
    return (lo, hi)
def normal(mean, stdev, x):
    '''PDF of the normal distribution N(mean, stdev**2) evaluated at x.'''
    sqrt_2pi = 2.5066282746310002  # sqrt(2 * pi)
    z = (x - mean) / stdev
    return math.exp(-z * z / 2.0) / (sqrt_2pi * stdev)
def build_fwhm(centers):
    '''Returns FWHM list, assuming FWHM is midway between adjacent bands.

    Interior bands get half the spacing between their two neighbors; the
    first and last bands get the spacing to their single neighbor.
    '''
    first = centers[1] - centers[0]
    last = centers[-1] - centers[-2]
    inner = [(centers[i + 1] - centers[i - 1]) / 2.0
             for i in range(1, len(centers) - 1)]
    return [first] + inner + [last]
def create_resampling_matrix(centers1, fwhm1, centers2, fwhm2):
'''
Returns a resampling matrix to convert spectra from one band discretization
to another. Arguments are the band centers and full-width half maximum
spectral response for the original and new band discretizations.
'''
logger = logging.getLogger('spectral')
sqrt_8log2 = 2.3548200450309493
N1 = len(centers1)
N2 = len(centers2)
bounds1 = [[centers1[i] - fwhm1[i] / 2.0, centers1[i] + fwhm1[i] /
2.0] for i in range(N1)]
bounds2 = [[centers2[i] - fwhm2[i] / 2.0, centers2[i] + fwhm2[i] /
2.0] for i in range(N2)]
M = np.zeros([N2, N1])
jStart = 0
nan = float('nan')
for i in range(N2):
stdev = fwhm2[i] / sqrt_8log2
j = jStart
# Find the first original band that overlaps the new band
while j < N1 and bounds1[j][1] < bounds2[i][0]:
j += 1
if j == N1:
logger.info(('No overlap for target band %d (%f / %f)' % (
i, centers2[i], fwhm2[i])))
M[i, 0] = nan
continue
matches = []
# Get indices for all original bands that overlap the new band
while j < N1 and bounds1[j][0] < bounds2[i][1]:
if ranges_overlap(bounds1[j], bounds2[i]):
matches.append(j)
j += 1
# Put NaN in first element of any row that doesn't produce a band in
# the new schema.
if len(matches) == 0:
logger.info('No overlap for target band %d (%f / %f)',
i, centers2[i], fwhm2[i])
M[i, 0] = nan
continue
# Determine the weights for the original bands that overlap the new
# band. There may be multiple bands that overlap or even just a single
# band that only partially overlaps. Weights are normoalized so either
# case can be handled.
overlaps = [overlap(bounds1[k], bounds2[i]) for k in matches]
contribs = np.zeros(len(matches))
A = 0.
for k in range(len(matches)):
#endNorms = [normal(centers2[i], stdev, x) for x in overlaps[k]]
#dA = (overlaps[k][1] - overlaps[k][0]) * sum(endNorms) / 2.0
(a, b) = [(x - centers2[i]) / stdev for x in overlaps[k]]
dA = normal_integral(a, b)
contribs[k] = dA
A += dA
contribs = contribs / A
for k in range(len(matches)):
M[i, matches[k]] = contribs[k]
return M
class BandResampler:
'''A callable object for resampling spectra between band discretizations.
A source band will contribute to any destination band where there is
overlap between the FWHM of the two bands. If there is an overlap, an
integral is performed over the region of overlap assuming the source band
data value is constant over its FWHM (since we do not know the true
spectral load over the source band) and the destination band has a Gaussian
response function. Any target bands that do not have any overlapping source
bands will contain NaN as the resampled band value.
If bandwidths are not specified for source or destination bands, the bands
are assumed to have FWHM values that span half the distance to the adjacent
bands.
'''
def __init__(self, centers1, centers2, fwhm1=None, fwhm2=None):
'''BandResampler constructor.
Usage:
resampler = BandResampler(bandInfo1, bandInfo2)
resampler = BandResampler(centers1, centers2, [fwhm1 = None [, fwhm2 = None]])
Arguments:
`bandInfo1` (:class:`~spectral.BandInfo`):
Discretization of the source bands.
`bandInfo2` (:class:`~spectral.BandInfo`):
Discretization of the destination bands.
`centers1` (list):
floats defining center values of source bands.
`centers2` (list):
floats defining center values of destination bands.
`fwhm1` (list):
Optional list defining FWHM values of source bands.
`fwhm2` (list):
Optional list defining FWHM values of destination bands.
Returns:
A callable BandResampler object that takes a spectrum corresponding
to the source bands and returns the spectrum resampled to the
destination bands.
If bandwidths are not specified, the associated bands are assumed to
have FWHM values that span half the distance to the adjacent bands.
'''
if isinstance(centers1, BandInfo):
fwhm1 = centers1.bandwidths
centers1 = centers1.centers
if isinstance(centers2, BandInfo):
fwhm2 = centers2.bandwidths
centers2 = centers2.centers
if fwhm1 is None:
fwhm1 = build_fwhm(centers1)
if fwhm2 is None:
fwhm2 = build_fwhm(centers2)
self.matrix = create_resampling_matrix(
centers1, fwhm1, centers2, fwhm2)
def __call__(self, spectrum):
'''Takes a source spectrum as input and returns a resampled spectrum.
Arguments:
`spectrum` (list or :class:`numpy.ndarray`):
list or vector of values to be resampled. Must have same
length as the source band discretiation used to created the
resampler.
Returns:
A resampled rank-1 :class:`numpy.ndarray` with length corresponding
to the destination band discretization used to create the resampler.
Any target bands that do not have at lease one overlapping source band
will contain `float('nan')` as the resampled band value.'''
return np.dot(self.matrix, spectrum)
| mit | 707d4dc1527553aa9165c0878e0f00b0 | 31.802521 | 90 | 0.596644 | 3.524605 | false | false | false | false |
oss/shrunk | backend/shrunk/api/security.py | 1 | 5372 | from crypt import methods
from os import link
from pydoc import cli
from typing import Any
from flask import Blueprint, abort, current_app, jsonify
from shrunk.client import ShrunkClient
from ..client.exceptions import NoSuchObjectException, InvalidStateChange
from shrunk.util.decorators import require_login
from bson import ObjectId
__all__ = ['bp']
bp = Blueprint('security', __name__, url_prefix='/api/v1/security')
@bp.route('/promote/<ObjectId:link_id>', methods=['PATCH'])
@require_login
def promote(netid: str, client: ShrunkClient, link_id: ObjectId) -> Any:
"""``PATCH /api/v1/security/promote``
Promotes a pending link to an actual link, creating a link document in
the link collection.
:param link_id: id of link to promote
"""
if not client.roles.has('admin', netid):
abort(403)
current_app.logger.warning(f'calling link status with objectid of {link_id}')
current_app.logger.warning(client.security.get_pending_links())
try:
link_id = client.security.promote_link(netid, link_id)
except NoSuchObjectException:
return jsonify({'errors': ['link is not pending']}), 404
except InvalidStateChange:
return jsonify({'errors': ['cannot promote non-pending link']}), 409
except Exception as err:
current_app.logger.warning(err)
return jsonify({'_id': link_id}), 200
@bp.route('/reject/<ObjectId:link_id>', methods=['PATCH'])
@require_login
def reject(netid: str, client: ShrunkClient, link_id: ObjectId) -> Any:
"""``PATCH /api/v1/security/patch``
Rejects a pending link, denying link creation in link collection forever.
:param link_id: id of link to reject
"""
if not client.roles.has('admin', netid):
abort(403)
try:
client.security.reject_link(netid, link_id)
except NoSuchObjectException:
return jsonify({'errors': ['link is not pending']}), 404
except InvalidStateChange:
return jsonify({'errors': ['cannot demote non-pending link']}), 409
except Exception as err:
current_app.logger.warning(err)
return jsonify({}), 200
@bp.route('/security_test/<b32:long_url>', methods=['GET'])
@require_login
def security_test(netid: str, client: ShrunkClient, long_url: str) -> Any:
"""``GET /api/v1/security/security_test/<b32:long_url>``
This endpoint is meant for testing purposes only; it should only be called in the unit tests.
The purpose of this endpoint is to modularize testing of the security measures. In the case
that the security measures do not work, this test will be the first to clearly show that.
"""
if not client.roles.has('admin', netid):
abort(403)
return jsonify({'detected': client.security.security_risk_detected(long_url)})
@bp.route('/pending_links', methods=['GET'])
@require_login
def get_pending_links(netid: str, client: ShrunkClient) -> Any:
"""``GET /api/v1/security/pending_links``
Retrieves a list of pending links
"""
if not client.roles.has('admin', netid):
abort(403)
return jsonify({'pendingLinks': client.security.get_pending_links()}), 200
@bp.route('/pending_links/count', methods=['GET'])
@require_login
def get_pending_link_count(netid: str, client: ShrunkClient) -> Any:
"""``GET /api/v1/security/pending_links/count``
Retrieves the length of the list of pending links
"""
if not client.roles.has('admin', netid):
abort(403)
return jsonify({
'pending_links_count': client.security.get_number_of_pending_links()
}), 200
@bp.route('/status/<ObjectId:link_id>', methods=['GET'])
@require_login
def get_link_status(netid: str, client: ShrunkClient, link_id: ObjectId) -> Any:
"""``GET /api/v1/security/status/<ObjectId:link_id>``
Gets the status of a pending link by id.
:param link_id:
"""
if not client.roles.has('admin', netid):
abort(403)
try:
link_document = client.security.get_unsafe_link_document(link_id)
except NoSuchObjectException:
return jsonify({'error': ['object does not exist']}), 404
except Exception:
return jsonify({'error': ['an unknown exception when getting link status']}), 500
return jsonify({
'title': link_document['title'],
'status': link_document['status']
}), 200
@bp.route('/toggle', methods=['PATCH'])
@require_login
def toggle_security(netid: str, client: ShrunkClient) -> Any:
"""``PATCH /api/v1/security/toggle``
Toggles whether or not security measures are on
"""
if not client.roles.has('admin', netid):
abort(403)
try:
status = client.security.toggle_security()
except Exception:
return jsonify({'error': ['an error occurred while toggling security']}), 500
return jsonify({
'status': status
}), 200
@bp.route('/get_status', methods=['GET'])
@require_login
def get_security_status(netid: str, client: ShrunkClient) -> Any:
"""``GET /api/v1/security/get_status``
Checks the status of security measures
"""
if not client.roles.has('admin', netid):
abort(403)
try:
status = client.security.get_security_status()
except Exception:
return jsonify({'error': ['an error occurred while obtaining security status']}), 500
return jsonify({
'status': status
}), 200
| mit | b2c8e1f64cb3446816ff73fd1780ee51 | 30.6 | 97 | 0.664185 | 3.684499 | false | false | false | false |
spectralpython/spectral | spectral/io/bsqfile.py | 1 | 13367 | '''
Code for handling files that are band sequential (BSQ).
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import array
import logging
import numpy as np
import os
import sys
import spectral as spy
from ..utilities.python23 import typecode, tobytes, frombytes
from .spyfile import SpyFile, MemmapFile
byte_typecode = typecode('b')
class BsqFile(SpyFile, MemmapFile):
'''
A class to represent image files stored with bands sequential.
'''
def __init__(self, params, metadata=None):
self.interleave = spy.BSQ
if metadata is None:
metadata = {}
SpyFile.__init__(self, params, metadata)
self._memmap = self._open_memmap('r')
def _open_memmap(self, mode):
logger = logging.getLogger('spectral')
if (os.path.getsize(self.filename) < sys.maxsize):
try:
(R, C, B) = self.shape
return np.memmap(self.filename, dtype=self.dtype, mode=mode,
offset=self.offset, shape=(B, R, C))
except:
logger.debug('Unable to create memmap interface.')
return None
else:
return None
def read_band(self, band, use_memmap=True):
'''Reads a single band from the image.
Arguments:
`band` (int):
Index of band to read.
`use_memmap` (bool, default True):
Specifies whether the file's memmap interface should be used
to read the data. Setting this arg to True only has an effect
if a memmap is being used (i.e., if `img.using_memmap` is True).
Returns:
:class:`numpy.ndarray`
An `MxN` array of values for the specified band.
'''
if self._memmap is not None and use_memmap is True:
data = np.array(self._memmap[band, :, :])
if self.scale_factor != 1:
data = data / float(self.scale_factor)
return data
vals = array.array(byte_typecode)
offset = self.offset + band * self.sample_size * \
self.nrows * self.ncols
f = self.fid
# Pixel format is BSQ, so read the whole band at once.
f.seek(offset, 0)
vals.fromfile(f, self.nrows * self.ncols * self.sample_size)
arr = np.frombuffer(tobytes(vals), dtype=self.dtype)
arr = arr.reshape(self.nrows, self.ncols)
if self.scale_factor != 1:
return arr / float(self.scale_factor)
return arr
def read_bands(self, bands, use_memmap=False):
'''Reads multiple bands from the image.
Arguments:
`bands` (list of ints):
Indices of bands to read.
`use_memmap` (bool, default False):
Specifies whether the file's memmap interface should be used
to read the data. Setting this arg to True only has an effect
if a memmap is being used (i.e., if `img.using_memmap` is True).
Returns:
:class:`numpy.ndarray`
An `MxNxL` array of values for the specified bands. `M` and `N`
are the number of rows & columns in the image and `L` equals
len(`bands`).
'''
if self._memmap is not None and use_memmap is True:
data = np.array(self._memmap[bands, :, :]).transpose((1, 2, 0))
if self.scale_factor != 1:
data = data / float(self.scale_factor)
return data
f = self.fid
arr = np.zeros((self.nrows, self.ncols, len(bands)), dtype=self.dtype)
for j in range(len(bands)):
vals = array.array(byte_typecode)
offset = self.offset + (bands[j]) * self.sample_size \
* self.nrows * self.ncols
# Pixel format is BSQ, so read an entire band at time.
f.seek(offset, 0)
vals.fromfile(f, self.nrows * self.ncols * self.sample_size)
band = np.frombuffer(tobytes(vals), dtype=self.dtype)
arr[:, :, j] = band.reshape(self.nrows, self.ncols)
if self.scale_factor != 1:
return arr / float(self.scale_factor)
return arr
def read_pixel(self, row, col, use_memmap=True):
'''Reads the pixel at position (row,col) from the file.
Arguments:
`row`, `col` (int):
Indices of the row & column for the pixel
`use_memmap` (bool, default True):
Specifies whether the file's memmap interface should be used
to read the data. Setting this arg to True only has an effect
if a memmap is being used (i.e., if `img.using_memmap` is True).
Returns:
:class:`numpy.ndarray`
A length-`B` array, where `B` is the number of image bands.
'''
if self._memmap is not None and use_memmap is True:
data = np.array(self._memmap[:, row, col])
if self.scale_factor != 1:
data = data / float(self.scale_factor)
return data
vals = array.array(byte_typecode)
delta = self.sample_size * (self.nbands - 1)
offset = self.offset + row * self.nbands * self.ncols \
* self.sample_size + col * self.sample_size
f = self.fid
nPixels = self.nrows * self.ncols
ncols = self.ncols
sampleSize = self.sample_size
bandSize = sampleSize * nPixels
rowSize = sampleSize * self.ncols
for i in range(self.nbands):
f.seek(self.offset
+ i * bandSize
+ row * rowSize
+ col * sampleSize, 0)
vals.fromfile(f, sampleSize)
pixel = np.frombuffer(tobytes(vals), dtype=self.dtype)
if self.scale_factor != 1:
return pixel / float(self.scale_factor)
return pixel
def read_subregion(self, row_bounds, col_bounds, bands=None,
use_memmap=True):
'''
Reads a contiguous rectangular sub-region from the image.
Arguments:
`row_bounds` (2-tuple of ints):
(a, b) -> Rows a through b-1 will be read.
`col_bounds` (2-tuple of ints):
(a, b) -> Columnss a through b-1 will be read.
`bands` (list of ints):
Optional list of bands to read. If not specified, all bands
are read.
`use_memmap` (bool, default True):
Specifies whether the file's memmap interface should be used
to read the data. Setting this arg to True only has an effect
if a memmap is being used (i.e., if `img.using_memmap` is True).
Returns:
:class:`numpy.ndarray`
An `MxNxL` array.
'''
if self._memmap is not None and use_memmap is True:
if bands is None:
data = np.array(self._memmap[:, row_bounds[0]: row_bounds[1],
col_bounds[0]: col_bounds[1]])
else:
data = np.array(
self._memmap[bands, row_bounds[0]: row_bounds[1],
col_bounds[0]: col_bounds[1]])
data = data.transpose((1, 2, 0))
if self.scale_factor != 1:
data = data / float(self.scale_factor)
return data
nSubRows = row_bounds[1] - row_bounds[0] # Rows in sub-image
nSubCols = col_bounds[1] - col_bounds[0] # Cols in sub-image
f = self.fid
f.seek(self.offset, 0)
# Increments between bands
if bands is None:
# Read all bands.
bands = list(range(self.nbands))
arr = np.zeros((nSubRows, nSubCols, len(bands)), dtype=self.dtype)
nrows = self.nrows
ncols = self.ncols
sampleSize = self.sample_size
bandSize = nrows * ncols * sampleSize
colStartOffset = col_bounds[0] * sampleSize
rowSize = ncols * sampleSize
rowStartOffset = row_bounds[0] * rowSize
nSubBands = len(bands)
# Pixel format is BSQ
for i in bands:
vals = array.array(byte_typecode)
bandOffset = i * bandSize
for j in range(row_bounds[0], row_bounds[1]):
f.seek(self.offset
+ bandOffset
+ j * rowSize
+ colStartOffset, 0)
vals.fromfile(f, nSubCols * sampleSize)
subArray = np.frombuffer(tobytes(vals),
dtype=self.dtype).reshape((nSubRows,
nSubCols))
arr[:, :, i] = subArray
if self.scale_factor != 1:
return arr / float(self.scale_factor)
return arr
def read_subimage(self, rows, cols, bands=None, use_memmap=False):
'''
Reads arbitrary rows, columns, and bands from the image.
Arguments:
`rows` (list of ints):
Indices of rows to read.
`cols` (list of ints):
Indices of columns to read.
`bands` (list of ints):
Optional list of bands to read. If not specified, all bands
are read.
`use_memmap` (bool, default False):
Specifies whether the file's memmap interface should be used
to read the data. Setting this arg to True only has an effect
if a memmap is being used (i.e., if `img.using_memmap` is True).
Returns:
:class:`numpy.ndarray`
An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`),
and `L` = len(bands) (or # of image bands if `bands` == None).
'''
if self._memmap is not None and use_memmap is True:
if bands is None:
data = np.array(self._memmap[:].take(rows, 1).take(cols, 2))
else:
data = np.array(
self._memmap.take(bands, 0).take(rows, 1).take(cols, 2))
data = data.transpose((1, 2, 0))
if self.scale_factor != 1:
data = data / float(self.scale_factor)
return data
nSubRows = len(rows) # Rows in sub-image
nSubCols = len(cols) # Cols in sub-image
d_col = self.sample_size
d_band = d_col * self.ncols
d_row = d_band * self.nbands
f = self.fid
f.seek(self.offset, 0)
# Increments between bands
if bands is None:
# Read all bands.
bands = list(range(self.nbands))
nSubBands = len(bands)
arr = np.zeros((nSubRows, nSubCols, nSubBands), dtype=self.dtype)
offset = self.offset
vals = array.array(byte_typecode)
nrows = self.nrows
ncols = self.ncols
sampleSize = self.sample_size
bandSize = nrows * ncols * sampleSize
sampleSize = self.sample_size
rowSize = ncols * sampleSize
# Pixel format is BSQ
for i in bands:
bandOffset = offset + i * bandSize
for j in rows:
rowOffset = j * rowSize
for k in cols:
f.seek(bandOffset
+ rowOffset
+ k * sampleSize, 0)
vals.fromfile(f, sampleSize)
arr = np.frombuffer(tobytes(vals), dtype=self.dtype)
arr = arr.reshape(nSubBands, nSubRows, nSubCols)
arr = np.transpose(arr, (1, 2, 0))
if self.scale_factor != 1:
return arr / float(self.scale_factor)
return arr
def read_datum(self, i, j, k, use_memmap=True):
'''Reads the band `k` value for pixel at row `i` and column `j`.
Arguments:
`i`, `j`, `k` (integer):
Row, column and band index, respectively.
`use_memmap` (bool, default True):
Specifies whether the file's memmap interface should be used
to read the data. Setting this arg to True only has an effect
if a memmap is being used (i.e., if `img.using_memmap` is True).
Using this function is not an efficient way to iterate over bands or
pixels. For such cases, use readBands or readPixel instead.
'''
if self._memmap is not None and use_memmap is True:
datum = self._memmap[k, i, j]
if self.scale_factor != 1:
datum /= float(self.scale_factor)
return datum
nrows = self.nrows
ncols = self.ncols
sampleSize = self.sample_size
self.fid.seek(self.offset
+ (k * nrows * ncols
+ i * ncols
+ j) * sampleSize, 0)
vals = array.array(byte_typecode)
vals.fromfile(self.fid, sampleSize)
arr = np.frombuffer(tobytes(vals), dtype=self.dtype)
return arr.tolist()[0] / float(self.scale_factor)
| mit | 006c9537c51e4fa6c1424d1e485268e4 | 32.168734 | 82 | 0.524875 | 4.045702 | false | false | false | false |
markstory/lint-review | tests/tools/test_pytype.py | 1 | 4014 | from lintreview.review import Problems, Comment
from lintreview.tools.pytype import Pytype
from unittest import TestCase
from tests import root_dir, requires_image, read_file, read_and_restore_file
class TestPytype(TestCase):
fixtures = [
'tests/fixtures/pytype/no_errors.py',
'tests/fixtures/pytype/has_errors.py',
]
def setUp(self):
self.problems = Problems()
self.tool = Pytype(self.problems, {}, root_dir)
def test_version(self):
assert self.tool.version() != ''
def test_match_file(self):
self.assertFalse(self.tool.match_file('test.php'))
self.assertFalse(self.tool.match_file('test.js'))
self.assertFalse(self.tool.match_file('dir/name/test.js'))
self.assertTrue(self.tool.match_file('test.py'))
self.assertTrue(self.tool.match_file('dir/name/test.py'))
@requires_image('pytype')
def test_process_files__one_file_pass(self):
self.tool.process_files([self.fixtures[0]])
self.assertEqual([], self.problems.all(self.fixtures[0]))
@requires_image('pytype')
def test_process_files__one_file_fail(self):
self.tool.process_files([self.fixtures[1]])
problems = self.problems.all(self.fixtures[1])
self.assertEqual(3, len(problems))
fname = self.fixtures[1]
expected = Comment(
fname, 6, 6,
"No attribute 'group' on None [attribute-error]"
" In Optional[Match[str]]")
self.assertEqual(expected, problems[0])
expected = Comment(
fname, 9, 9, "Invalid __slot__ entry: '1' [bad-slots]")
self.assertEqual(expected, problems[1])
@requires_image('pytype')
def test_process_files_two_files(self):
self.tool.process_files(self.fixtures)
self.assertEqual([], self.problems.all(self.fixtures[0]))
problems = self.problems.all(self.fixtures[1])
self.assertEqual(3, len(problems))
fname = self.fixtures[1]
expected = Comment(
fname, 6, 6,
"No attribute 'group' on None [attribute-error]"
" In Optional[Match[str]]")
self.assertEqual(expected, problems[0])
expected = Comment(
fname, 9, 9, "Invalid __slot__ entry: '1' [bad-slots]")
self.assertEqual(expected, problems[1])
@requires_image('pytype')
def test_process_files__config_invalid(self):
options = {
'config': 'tests/fixtures/pytype/derp'
}
self.tool = Pytype(self.problems, options, root_dir)
self.tool.process_files([self.fixtures[1]])
problems = self.problems.all()
self.assertEqual(1, len(problems))
self.assertIn('Pytype failed', problems[0].body)
self.assertIn('config file', problems[0].body)
@requires_image('pytype')
def test_process_files__config(self):
options = {
'config': 'tests/fixtures/pytype/pytype.ini'
}
self.tool = Pytype(self.problems, options, root_dir)
self.tool.process_files([self.fixtures[1]])
problems = self.problems.all(self.fixtures[1])
self.assertEqual(2, len(problems))
for p in problems:
self.assertNotIn('attribute-error', p.body)
def test_has_fixer(self):
tool = Pytype(self.problems, {}, root_dir)
self.assertEqual(False, tool.has_fixer())
def test_has_fixer__enabled(self):
tool = Pytype(self.problems, {'fixer': True}, root_dir)
self.assertEqual(True, tool.has_fixer())
@requires_image('pytype')
def test_run_fixer(self):
tool = Pytype(self.problems, {'fixer': True}, root_dir)
original = read_file(self.fixtures[1])
tool.execute_fixer(self.fixtures)
updated = read_and_restore_file(self.fixtures[1], original)
assert original != updated, 'File content should change.'
self.assertEqual(0, len(self.problems.all()),
'No errors should be recorded')
| mit | 7cc128a65ee8bd8f9280c2dbc1788d84 | 34.839286 | 76 | 0.616094 | 3.702952 | false | true | false | false |
markstory/lint-review | lintreview/tools/yamllint.py | 1 | 1718 | import os
import lintreview.docker as docker
from lintreview.review import IssueComment
from lintreview.tools import Tool, process_quickfix, extract_version
class Yamllint(Tool):
name = 'yamllint'
def version(self):
output = docker.run('python2', ['yamllint', '--version'], self.base_path)
return extract_version(output)
def check_dependencies(self):
"""
See if python2 image is installed
"""
return docker.image_exists('python2')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += [
'-c',
docker.apply_base(self.options['config'])
]
command += files
output = docker.run('python2', command, self.base_path)
if not output:
return False
if 'No such file' in output and 'Traceback' in output:
error = output.strip().split("\n")[-1]
msg = (u'`yamllint` failed with the following error:\n'
'```\n'
'{}\n'
'```\n')
return self.problems.add(IssueComment(msg.format(error)))
output = output.split("\n")
process_quickfix(self.problems, output, docker.strip_base)
| mit | 51edd417a307b3fc2fa073a6d34d3afe | 29.140351 | 81 | 0.566356 | 4.139759 | false | true | false | false |
markstory/lint-review | tests/test_processor.py | 1 | 6893 | from unittest import TestCase
from mock import patch, sentinel, ANY
import json
import responses
from lintreview.config import build_review_config
from lintreview.diff import DiffCollection
from lintreview.processor import Processor
from lintreview.fixers.error import ConfigurationError, WorkflowError
from . import load_fixture, test_dir, requires_image, fixer_ini, create_repo
app_config = {
'GITHUB_OAUTH_TOKEN': 'fake-token',
'GITHUB_AUTHOR_NAME': 'bot',
'GITHUB_AUTHOR_EMAIL': 'bot@example.com',
'SUMMARY_THRESHOLD': 50,
}
class TestProcessor(TestCase):
def setUp(self):
self.tool_patcher = patch('lintreview.processor.tools')
self.tool_stub = self.tool_patcher.start()
self.fixer_patcher = patch('lintreview.processor.fixers')
self.fixer_stub = self.fixer_patcher.start()
def tearDown(self):
self.tool_patcher.stop()
self.fixer_patcher.stop()
@responses.activate
def test_load_changes(self):
repo = create_repo()
pull = repo.pull_request(1)
config = build_review_config('', app_config)
subject = Processor(repo, pull, './tests', config)
subject.load_changes()
assert subject._changes
assert isinstance(subject._changes, DiffCollection)
assert 1 == len(subject._changes), 'File count is wrong'
@responses.activate
def test_run_tools__no_changes(self):
repo = create_repo()
pull = repo.pull_request(1)
config = build_review_config('', app_config)
subject = Processor(repo, pull, './tests', config)
self.assertRaises(RuntimeError,
subject.run_tools)
@responses.activate
def test_run_tools__import_error(self):
self.tool_patcher.stop()
repo = create_repo()
pull = repo.pull_request(1)
ini = """
[tools]
linters = nope
"""
config = build_review_config(ini, app_config)
subject = Processor(repo, pull, './tests', config)
subject.load_changes()
subject.run_tools()
self.tool_patcher.start()
problems = subject.problems.all()
assert len(problems) == 1
assert 'could not load linters' in problems[0].body
@responses.activate
def test_run_tools__ignore_patterns(self):
repo = create_repo()
pull = repo.pull_request(1)
config = build_review_config(fixer_ini, app_config)
config.ignore_patterns = lambda: ['View/Helper/*']
subject = Processor(repo, pull, './tests', config)
subject.load_changes()
subject.run_tools()
self.tool_stub.run.assert_called_with(
ANY,
[],
ANY
)
@responses.activate
def test_run_tools__execute_fixers(self):
repo = create_repo()
pull = repo.pull_request(1)
self.tool_stub.factory.return_value = sentinel.tools
self.fixer_stub.create_context.return_value = sentinel.context
self.fixer_stub.run_fixers.return_value = sentinel.diff
config = build_review_config(fixer_ini, app_config)
subject = Processor(repo, pull, './tests', config)
subject.load_changes()
subject.run_tools()
file_path = 'View/Helper/AssetCompressHelper.php'
self.fixer_stub.create_context.assert_called_with(
config,
'./tests',
repo,
pull
)
self.fixer_stub.run_fixers.assert_called_with(
sentinel.tools,
'./tests',
[file_path]
)
self.fixer_stub.apply_fixer_diff.assert_called_with(
subject._changes,
sentinel.diff,
sentinel.context
)
self.tool_stub.run.assert_called()
@responses.activate
def test_run_tools__execute_fixers_fail(self):
repo = create_repo()
pull = repo.pull_request(1)
self.tool_stub.factory.return_value = sentinel.tools
self.fixer_stub.create_context.return_value = sentinel.context
self.fixer_stub.run_fixers.side_effect = RuntimeError
config = build_review_config(fixer_ini, app_config)
subject = Processor(repo, pull, './tests', config)
subject.load_changes()
subject.run_tools()
self.fixer_stub.create_context.assert_called()
self.fixer_stub.run_fixers.assert_called()
self.fixer_stub.apply_fixer_diff.assert_not_called()
self.fixer_stub.rollback_changes.assert_called()
self.tool_stub.run_assert_called()
@responses.activate
def test_run_tools_fixer_error_scenario(self):
errors = [
WorkflowError('A bad workflow thing'),
ConfigurationError('A bad configuration thing'),
]
for error in errors:
self.tool_stub.reset()
self.fixer_stub.reset()
self._test_run_tools_fixer_error_scenario(error)
def _test_run_tools_fixer_error_scenario(self, error):
repo = create_repo()
pull = repo.pull_request(1)
self.tool_stub.factory.return_value = sentinel.tools
self.fixer_stub.create_context.return_value = sentinel.context
self.fixer_stub.apply_fixer_diff.side_effect = error
config = build_review_config(fixer_ini, app_config)
subject = Processor(repo, pull, './tests', config)
subject.load_changes()
subject.run_tools()
self.fixer_stub.create_context.assert_called()
self.fixer_stub.run_fixers.assert_called()
self.tool_stub.run.assert_called()
self.fixer_stub.rollback_changes.assert_called_with('./tests', pull.head)
assert subject.problems
assert 1 == len(subject.problems), 'strategy error adds pull comment'
assert 0 == subject.problems.error_count(), 'fixer failure should be info level'
assert 'Unable to apply fixers. ' + str(error) == subject.problems.all()[0].body
assert 1 == len(subject.problems), 'strategy error adds pull comment'
class TestProcessorIntegration(TestCase):
@responses.activate
@requires_image('php')
def test_execute__integration(self):
repo = create_repo()
pull = repo.pull_request(1)
file_url = 'https://api.github.com/repos/markstory/lint-test/pulls/1/files'
responses.replace(
responses.GET,
file_url,
json=json.loads(load_fixture('integration_test_files.json'))
)
responses.add_passthru('http+docker://localhost')
config = """
[tools]
linters = phpcs
"""
config = build_review_config(config, app_config)
subject = Processor(repo, pull, test_dir, config)
subject.load_changes()
review, problems = subject.execute()
assert review is not None
assert problems is not None
assert len(problems) > 0
| mit | 88d28546799061a2e7aa458bcb117be1 | 31.060465 | 88 | 0.622806 | 3.916477 | false | true | false | false |
markstory/lint-review | tests/test_diff.py | 1 | 14939 | import re
from unittest import TestCase
from mock import patch
from . import load_fixture, create_pull_files
from lintreview.diff import DiffCollection, Diff, parse_diff, ParseError
class TestDiffCollection(TestCase):
    """Tests for ``DiffCollection`` and the ``parse_diff()`` factory.

    All inputs come from fixture files under ``tests/fixtures/diff``;
    the expected filenames/line numbers below mirror those fixtures.
    """
    # Single file, single commit
    one_file = load_fixture('diff/one_file_pull_request.txt')
    # Two files modified in the same commit
    two_files = load_fixture('diff/two_file_pull_request.txt')
    # Diff with renamed files
    renamed_files = load_fixture('diff/diff_with_rename_and_blob.txt')
    # Diff with removed files
    removed_files = load_fixture('diff/diff_with_removed_files.txt')
    single_line_add = load_fixture('diff/diff_single_line_add.txt')
    new_empty_file = load_fixture('diff/new_empty_file.txt')
    one_file_json = load_fixture('one_file_pull_request.json')
    def test_constructor_one_file(self):
        # DiffCollection can also be built directly from pull-request files.
        patches = create_pull_files(self.one_file_json)
        changes = DiffCollection(patches)
        self.assertEqual(1, len(changes))
        self.assert_instances(changes, 1, Diff)
    def test_create_one_element(self):
        changes = parse_diff(self.one_file)
        self.assertEqual(1, len(changes))
        self.assert_instances(changes, 1, Diff)
    def test_create_two_files(self):
        changes = parse_diff(self.two_files)
        self.assertEqual(2, len(changes))
        self.assert_instances(changes, 2, Diff)
    def test_get_files__one_file(self):
        changes = parse_diff(self.one_file)
        result = changes.get_files()
        expected = [
            "View/Helper/AssetCompressHelper.php"
        ]
        self.assertEqual(expected, result)
    def test_get_files__two_files(self):
        changes = parse_diff(self.two_files)
        result = changes.get_files()
        expected = [
            "Console/Command/Task/AssetBuildTask.php",
            "Test/test_files/View/Parse/single.ctp",
        ]
        self.assertEqual(expected, result)
    def test_get_files__two_files__ignore_pattern(self):
        changes = parse_diff(self.two_files)
        expected = [
            "Console/Command/Task/AssetBuildTask.php",
        ]
        # None/False entries exercise robustness of the ignore handling.
        ignore = ['Test/**', None, False]
        result = changes.get_files(ignore_patterns=ignore)
        self.assertEqual(expected, result)
    def test_get_files__ignore_pattern__multiple_wildcard(self):
        data = load_fixture('diff/multiple_wildcard_pull_request.txt')
        changes = parse_diff(data)
        expected = [
            "buildpacks/buildpack-ruby/tests/ruby-sinatra/test_web.rb",
        ]
        ignore = ['buildpacks/*/tests/*/test.sh']
        result = changes.get_files(ignore_patterns=ignore)
        self.assertEqual(expected, result)
    def test_has_line_changed__no_file(self):
        changes = parse_diff(self.two_files)
        self.assertFalse(changes.has_line_changed('derp', 99))
    def test_has_line_changed__no_line(self):
        changes = parse_diff(self.two_files)
        self.assertFalse(changes.has_line_changed(
            'Console/Command/Task/AssetBuildTask.php',
            99999))
    def test_has_line_changed__two_files(self):
        changes = parse_diff(self.two_files)
        filename = 'Console/Command/Task/AssetBuildTask.php'
        # True for additions
        self.assertTrue(changes.has_line_changed(filename, 117))
        self.assertTrue(changes.has_line_changed(filename, 119))
        # Should return false if the line was a deletion
        self.assertFalse(changes.has_line_changed(filename, 148))
        # Should return false for unchanged
        self.assertFalse(changes.has_line_changed(filename, 145))
    def test_has_line_changed__single_line(self):
        filename = 'some.js'
        changes = parse_diff(self.single_line_add)
        self.assertTrue(changes.has_line_changed(filename, 1))
        self.assertFalse(changes.has_line_changed(filename, 0))
        self.assertFalse(changes.has_line_changed(filename, 2))
    def test_parse_diff_empty_file(self):
        # A newly added empty file still yields one Diff entry.
        changes = parse_diff(self.new_empty_file)
        self.assertEqual(1, len(changes))
        self.assert_instances(changes, 1, Diff)
        assert changes[0].filename == 'app/models.py'
    def test_parsing_diffs_removed__file(self):
        changes = parse_diff(self.removed_files)
        self.assertEqual(0, len(changes),
                         'Should be no files as the file was removed')
        self.assertEqual([], changes.get_files())
    def test_parsing_diffs__renamed_file_and_blob(self):
        changes = parse_diff(self.renamed_files)
        assert len(changes) == 0, 'Should be no files as a blob and renames happened'
        assert [] == changes.get_files()
    @patch('lintreview.diff.log')
    def test_parsing_diffs__renamed_file_and_blob_no_log(self, log):
        # Renames/blobs are skipped silently, without warn/error logging.
        diff = parse_diff(self.renamed_files)
        assert len(diff) == 0
        self.assertEqual(False, log.warn.called)
        self.assertEqual(False, log.error.called)
    def test_parse_diff__no_input(self):
        diff = parse_diff('')
        assert len(diff) == 0
    def test_parse_diff__changed_lines_parsed(self):
        data = load_fixture('diff/one_file.txt')
        out = parse_diff(data)
        assert isinstance(out, DiffCollection)
        change = out.all_changes('tests/test_diff.py')
        self.assertEqual(1, len(change))
        expected = set([6, 9, 10, 55])
        self.assertEqual(expected, change[0].deleted_lines())
    def test_parse_diff__multiple_files(self):
        data = load_fixture('diff/two_files.txt')
        out = parse_diff(data)
        self.assertEqual(2, len(out))
        self.assertEqual(['lintreview/git.py', 'tests/test_git.py'],
                         out.get_files())
        for change in out:
            assert change.filename, 'has a filename'
            assert change.commit is None, 'No commit'
            # Git header lines must be stripped from each patch body.
            self.assertNotIn('git --diff', change.patch)
            self.assertNotIn('index', change.patch)
            self.assertNotIn('--- a', change.patch)
            self.assertNotIn('+++ b', change.patch)
            self.assertIn('@@', change.patch)
        change = out.all_changes('tests/test_git.py')[0]
        self.assertEqual({205, 206, 207, 208, 209, 210, 211, 212, 213},
                         change.added_lines())
    def test_parse_diff__bad_input(self):
        # Header lines with no hunks should raise a ParseError.
        data = """diff --git a/app/models.py b/app/models.py
index fa9a814..769886c 100644
--- a/app/models.py
+++ b/app/models.py"""
        with self.assertRaises(ParseError) as ctx:
            parse_diff(data)
        self.assertIn('Could not parse', str(ctx.exception))
    def test_first_changed_line(self):
        changes = parse_diff(self.two_files)
        filename = 'Console/Command/Task/AssetBuildTask.php'
        assert changes.first_changed_line('not there') is None
        assert changes.first_changed_line(filename) == 117
        filename = "Test/test_files/View/Parse/single.ctp"
        assert changes.first_changed_line(filename) == 3
    def assert_instances(self, collection, count, clazz):
        """
        Helper for checking a collection.
        """
        num = 0
        for item in collection:
            num += 1
            assert isinstance(item, clazz)
        self.assertEqual(count, num)
class TestDiff(TestCase):
    """Tests for a single ``Diff``: line tracking, hunks and intersection.

    Expected line numbers are tied to the fixture files referenced below.
    """
    one_file = load_fixture('diff/one_file_pull_request.txt')
    two_files = load_fixture('diff/two_file_pull_request.txt')
    # Block offset so lines don't match offsets
    block_offset = load_fixture('diff/pull_request_line_offset.txt')
    def setUp(self):
        # Each test gets the single Diff from the one-file fixture.
        diffs = parse_diff(self.one_file)
        self.diff = diffs[0]
    def test_parse_diff__headers_removed(self):
        data = load_fixture('diff/one_file.txt')
        out = parse_diff(data)
        assert isinstance(out, DiffCollection)
        self.assertEqual(1, len(out))
        self.assertEqual(['tests/test_diff.py'], out.get_files())
        change = out.all_changes('tests/test_diff.py')
        self.assertEqual(1, len(change))
        self.assertEqual('tests/test_diff.py', change[0].filename)
        self.assertEqual(None, change[0].commit,
                         'No commit as changes are just a diff')
        # Make sure git diff headers are not in patch
        self.assertNotIn('git --diff', change[0].patch)
        self.assertNotIn('index', change[0].patch)
        self.assertNotIn('--- a', change[0].patch)
        self.assertNotIn('+++ b', change[0].patch)
        self.assertIn('@@', change[0].patch)
    def test_filename(self):
        self.assertEqual("View/Helper/AssetCompressHelper.php",
                         self.diff.filename)
    def test_patch_property(self):
        # The parsed patch body must be a verbatim slice of the input.
        diff = parse_diff(self.one_file)[0]
        assert diff.patch in self.one_file
    def test_as_diff__one_hunk(self):
        data = load_fixture('diff/no_intersect_updated.txt')
        diff = parse_diff(data)[0]
        # Method results don't include index line.
        data = re.sub(r'^index.*?\n', '', data, 0, re.M)
        self.assertEqual(data, diff.as_diff())
    def test_as_diff__multi_hunk(self):
        data = load_fixture('diff/inset_hunks_updated.txt')
        diff = parse_diff(data)[0]
        # Method results don't include index line.
        data = re.sub(r'^index.*?\n', '', data, 0, re.M)
        self.assertEqual(data, diff.as_diff())
    def test_has_line_changed__no_line(self):
        self.assertFalse(self.diff.has_line_changed(None))
    def test_has_line_changed__added_only(self):
        # Check start and end of range
        self.assertTrue(self.diff.has_line_changed(454))
        self.assertTrue(self.diff.has_line_changed(464))
    def test_has_line_changed__not_find_deletes(self):
        diff = parse_diff(self.two_files)[0]
        self.assertTrue(diff.has_line_changed(117))
        # No unchanged lines.
        self.assertFalse(diff.has_line_changed(118))
        self.assertTrue(diff.has_line_changed(119))
        # No deleted lines.
        self.assertFalse(diff.has_line_changed(147))
        self.assertFalse(diff.has_line_changed(148))
    def test_has_line_changed__blocks_offset(self):
        # line_position maps file line numbers to diff positions.
        diff = parse_diff(self.block_offset)[0]
        self.assertTrue(diff.has_line_changed(32))
        self.assertEqual(26, diff.line_position(23))
        self.assertEqual(40, diff.line_position(32))
    def test_added_lines(self):
        diff = parse_diff(self.two_files)[0]
        adds = diff.added_lines()
        self.assertEqual(2, len(adds), 'incorrect addition length')
        self.assertEqual(set([117, 119]), adds, 'added line numbers are wrong')
    def test_deleted_lines(self):
        diff = parse_diff(self.two_files)[0]
        dels = diff.deleted_lines()
        self.assertEqual(3, len(dels), 'incorrect deleted length')
        self.assertEqual(set([117, 119, 148]), dels,
                         'deleted line numbers are wrong')
        overlap = diff.added_lines().intersection(diff.deleted_lines())
        self.assertEqual(set([117, 119]), overlap)
    def test_hunk_parsing(self):
        diff = parse_diff(self.two_files)[0]
        hunks = diff.hunks
        self.assertEqual(2, len(hunks))
        expected = set([117, 119])
        self.assertEqual(expected, hunks[0].added_lines())
        self.assertEqual(expected, hunks[0].deleted_lines())
        self.assertEqual(expected, diff.added_lines())
        self.assertEqual(set([]), hunks[1].added_lines())
        self.assertEqual(set([148]), hunks[1].deleted_lines())
        self.assertEqual(set([117, 119, 148]), diff.deleted_lines())
        # Diff delegates line_position to the hunk containing the line.
        self.assertEqual(diff.line_position(117), hunks[0].line_position(117))
        self.assertEqual(diff.line_position(119), hunks[0].line_position(119))
    def test_construct_with_hunks_kwarg(self):
        proto = parse_diff(self.two_files)[0]
        diff = Diff(None, proto.filename, None, hunks=proto.hunks)
        self.assertEqual(len(diff.hunks), len(proto.hunks))
        self.assertEqual(diff.hunks[0].patch, proto.hunks[0].patch)
    def test_construct_with_empty_hunks_kwarg(self):
        diff = Diff(None, 'test.py', 'abc123', hunks=[])
        self.assertEqual(0, len(diff.hunks))
    def test_intersection__simple(self):
        # These two diffs should fully overlap as
        # the updated diff hunks touch the original hunks.
        original = load_fixture('diff/intersecting_hunks_original.txt')
        updated = load_fixture('diff/intersecting_hunks_updated.txt')
        original = parse_diff(original)[0]
        updated = parse_diff(updated)[0]
        intersecting = updated.intersection(original)
        self.assertEqual(4, len(updated.hunks))
        self.assertEqual(4, len(intersecting))
    def test_intersection__no_intersect(self):
        # Diffs have no overlap as updated appends lines.
        original = load_fixture('diff/no_intersect_original.txt')
        updated = load_fixture('diff/no_intersect_updated.txt')
        original = parse_diff(original)[0]
        updated = parse_diff(updated)[0]
        intersecting = updated.intersection(original)
        self.assertEqual(1, len(updated.hunks))
        self.assertEqual(0, len(intersecting))
    def test_intersection__inset_hunks(self):
        # Updated contains two hunks inside original's changes
        original = load_fixture('diff/inset_hunks_original.txt')
        updated = load_fixture('diff/inset_hunks_updated.txt')
        original = parse_diff(original)[0]
        updated = parse_diff(updated)[0]
        intersecting = updated.intersection(original)
        self.assertEqual(2, len(updated.hunks))
        self.assertEqual(2, len(intersecting))
    def test_intersection__staggered_hunks(self):
        # Updated contains a big hunk in the middle that pushes
        # the original section down. The bottom hunk of updated
        # should overlap
        original = load_fixture('diff/staggered_original.txt')
        updated = load_fixture('diff/staggered_updated.txt')
        original = parse_diff(original)[0]
        updated = parse_diff(updated)[0]
        intersecting = updated.intersection(original)
        self.assertEqual(2, len(updated.hunks))
        self.assertEqual(2, len(intersecting))
    def test_intersection__adjacent(self):
        # Updated contains a two hunks that partially overlap
        # both should be included.
        original = load_fixture('diff/adjacent_original.txt')
        updated = load_fixture('diff/adjacent_updated.txt')
        original = parse_diff(original)[0]
        updated = parse_diff(updated)[0]
        intersecting = updated.intersection(original)
        self.assertEqual(2, len(intersecting))
    def test_first_changed_line(self):
        assert self.diff.first_changed_line() == 454
        data = load_fixture('diff/one_file.txt')
        out = parse_diff(data)
        change = out.all_changes('tests/test_diff.py')
        assert change[0].first_changed_line() == 6
markstory/lint-review | lintreview/cli.py | 1 | 3417 | import argparse
import lintreview.github as github
import sys
from flask import url_for
from lintreview.web import app
def main():
    """Entry point: parse CLI arguments and dispatch to the chosen subcommand."""
    cli_args = create_parser().parse_args()
    # Each subparser registers its handler via set_defaults(func=...).
    cli_args.func(cli_args)
def register_hook(args):
    """Install the lint-review webhook on the repository named in ``args``.

    Prints a success message, or writes the failure reason to stderr and
    exits with status 2.
    """
    try:
        process_hook(github.register_hook, args)
        sys.stdout.write('Hook registered successfully\n')
    except Exception as e:
        sys.stderr.write('Hook registration failed\n')
        # BaseException.message was removed in Python 3; str(e) is the
        # portable way to get the error text.
        sys.stderr.write(str(e) + '\n')
        sys.exit(2)
def remove_hook(args):
    """Remove the lint-review webhook from the repository named in ``args``.

    Prints a success message, or writes the failure reason to stderr and
    exits with status 2.
    """
    try:
        process_hook(github.unregister_hook, args)
        sys.stdout.write('Hook removed successfully\n')
    except Exception as e:
        sys.stderr.write('Hook removal failed\n')
        # BaseException.message was removed in Python 3; str(e) is the
        # portable way to get the error text.
        sys.stderr.write(str(e) + '\n')
        sys.exit(2)
def process_hook(func, args):
    """
    Generic helper for processing hook commands.

    Resolves the repository (with per-user credentials when ``-u`` was
    given, otherwise the application config) and invokes ``func`` with the
    repository and the externally reachable review endpoint URL.
    """
    credentials = None
    if args.login_user:
        credentials = {'GITHUB_OAUTH_TOKEN': args.login_user}
    with app.app_context():
        if credentials:
            # User-supplied token still needs the configured API base URL.
            credentials['GITHUB_URL'] = app.config['GITHUB_URL']
            repo = github.get_repository(credentials, args.user, args.repo)
        else:
            repo = github.get_repository(app.config, args.user, args.repo)
        endpoint = url_for('start_review', _external=True)
        func(repo, endpoint)
def create_parser():
    # Builds the argparse parser; add_argument order is user-visible in the
    # generated --help output, so declaration order is intentional.
    desc = """
    Command line utilities for lintreview.
    """
    parser = argparse.ArgumentParser(description=desc)
    commands = parser.add_subparsers(
        title="Subcommands",
        description="Valid subcommands")
    desc = """
    Register webhooks for a given user & repo
    The installed webhook will be used to trigger lint
    reviews as pull requests are opened/updated.
    """
    register = commands.add_parser('register', help=desc)
    # NOTE(review): process_hook() treats login_user as an OAuth token
    # (GITHUB_OAUTH_TOKEN) and never reads login_pass — the help text here
    # looks out of date; confirm against process_hook before relying on it.
    register.add_argument(
        '-u',
        '--user',
        dest='login_user',
        help="The user that has admin rights to the repo "
             "you are adding hooks to. Useful when the user "
             "in settings is not the administrator of "
             "your repositories.")
    register.add_argument(
        '-p',
        '--password',
        dest='login_pass',
        help="The password of the admin user.")
    register.add_argument('user',
                          help="The user or organization the repo is under.")
    register.add_argument('repo',
                          help="The repository to install a hook into.")
    # Dispatched by main() via args.func(args).
    register.set_defaults(func=register_hook)
    desc = """
    Unregister webhooks for a given user & repo.
    """
    remove = commands.add_parser('unregister', help=desc)
    remove.add_argument(
        '-u', '--user',
        dest='login_user',
        help="The OAuth token of the user that has admin rights to the repo "
             "you are removing hooks from. Useful when the "
             "user in settings is not the administrator of "
             "your repositories.")
    remove.add_argument('user',
                        help="The user or organization the repo is under.")
    remove.add_argument('repo',
                        help="The repository to remove a hook from.")
    remove.set_defaults(func=remove_hook)
    return parser
if __name__ == '__main__':
main()
| mit | 085ee6f0a7ef1baa6a9877c17b74d3a1 | 28.205128 | 77 | 0.585601 | 4.287327 | false | false | false | false |
markstory/lint-review | lintreview/tools/goodcheck.py | 1 | 2306 | import logging
import json
import lintreview.docker as docker
from lintreview.tools import Tool
log = logging.getLogger(__name__)
class Goodcheck(Tool):
    """Runs the goodcheck pattern linter inside the ``ruby2`` docker image."""

    name = 'goodcheck'

    def check_dependencies(self):
        """
        See if ruby image exists
        """
        return docker.image_exists('ruby2')

    def process_files(self, files):
        """
        Run checks with goodcheck
        """
        cmd = self._create_command() + files
        output = docker.run('ruby2', cmd, self.base_path)
        # goodcheck emits its JSON report on the final line of output.
        last_line = output.strip().split("\n")[-1]
        self._process_output(last_line)

    def _create_command(self):
        """Assemble the goodcheck CLI invocation from tool options."""
        cmd = ['goodcheck', 'check', '--format', 'json']
        rules = self.options.get('rules')
        if rules:
            # Comma separated rule ids, each passed via -R.
            for rule in rules.split(','):
                cmd += ['-R', rule.strip()]
        config = self.options.get('config')
        if config:
            cmd += ['--config', docker.apply_base(config)]
        return cmd

    def _process_output(self, output):
        """
        Process goodcheck json results.
        Where `output` is a line containing check results, formatted like:
        [{"rule_id":"<id>","path":"<filename>",
          "location":{"start_line":<line>,"start_column":<col>,
                      "end_line":<endline>,"end_column":<endcol>},
          "message":"<message>",
          "justifications":[]}]
        """
        try:
            results = json.loads(output)
        except ValueError:
            log.debug('Failed to load JSON data from goodcheck output %r',
                      output)
            results = []
        include_justifications = self.options.get(
            'add_justifications_to_comments', False)
        for item in results:
            body = item['message']
            if include_justifications and item['justifications']:
                body += "\n\n - " + "\n - ".join(item['justifications'])
            self.problems.add(
                docker.strip_base(item['path']),
                line=int(item['location']['start_line']),
                body=body)
markstory/lint-review | tests/tools/test_foodcritic.py | 1 | 1733 | import os
from lintreview.review import Comment, Problems
from lintreview.tools.foodcritic import Foodcritic
from unittest import TestCase
from tests import root_dir, requires_image
class TestFoodcritic(TestCase):
    """Integration tests for the Foodcritic tool (requires the ruby2 image)."""
    # fixtures[0]: clean cookbook, fixtures[1]: cookbook with violations.
    fixtures = [
        'tests/fixtures/foodcritic/noerrors',
        'tests/fixtures/foodcritic/errors',
    ]
    def setUp(self):
        self.problems = Problems()
    def test_version(self):
        self.tool = Foodcritic(self.problems, {}, root_dir)
        assert self.tool.version() != ''
    @requires_image('ruby2')
    def test_process_cookbook_pass__no_path(self):
        # base_path points straight at the cookbook; no 'path' option.
        self.tool = Foodcritic(self.problems,
                               {},
                               os.path.join(root_dir, self.fixtures[0]))
        self.tool.process_files(None)
        self.assertEqual([], self.problems.all())
    @requires_image('ruby2')
    def test_process_cookbook_pass(self):
        # Same cookbook, addressed via the 'path' option instead.
        self.tool = Foodcritic(self.problems,
                               {'path': self.fixtures[0]},
                               root_dir)
        self.tool.process_files(None)
        self.assertEqual([], self.problems.all())
    @requires_image('ruby2')
    def test_process_cookbook_fail(self):
        self.tool = Foodcritic(self.problems,
                               {'path': self.fixtures[1]},
                               root_dir)
        self.tool.process_files(None)
        problems = self.problems.all()
        self.assertEqual(5, len(problems))
        expected = Comment(
            'tests/fixtures/foodcritic/errors/recipes/apache2.rb', 1, 1,
            'FC007: Ensure recipe dependencies are reflected in cookbook '
            'metadata')
        self.assertEqual(expected, problems[1])
kivy/plyer | plyer/platforms/win/audio.py | 1 | 9787 | '''
Documentation:
http://docs.microsoft.com/en-us/windows/desktop/Multimedia
.. versionadded:: 1.4.0
'''
from os.path import join
from ctypes import windll
from ctypes import (
sizeof, c_void_p, c_ulonglong, c_ulong,
c_wchar_p, byref, Structure, create_string_buffer
)
from ctypes.wintypes import DWORD, UINT
from plyer.facades import Audio
from plyer.platforms.win.storagepath import WinStoragePath
# DWORD_PTR i.e. ULONG_PTR, 32/64bit
# Pointer-sized unsigned integer, chosen by the size of a void* at runtime.
ULONG_PTR = c_ulonglong if sizeof(c_void_p) == 8 else c_ulong
# device specific symbols
# NOTE(review): the numeric values below presumably mirror the MCI_* message
# and flag constants from the Windows mmsystem.h header — verify against it.
MCI_OPEN = 0x803
MCI_OPEN_TYPE = 0x2000
MCI_OPEN_ELEMENT = 512
MCI_RECORD = 0x80F
MCI_STOP = 0x808
MCI_SAVE = 0x813
MCI_PLAY = 0x806
MCI_CLOSE = 0x804
# recorder specific symbols
MCI_FROM = 4
MCI_TO = 8
MCI_WAIT = 2
MCI_SAVE_FILE = 256
class MCI_OPEN_PARMS(Structure):
    '''
    Struct for MCI_OPEN message parameters.
    .. versionadded:: 1.4.0
    '''
    # Field order/size must match the Windows ABI layout exactly.
    # NOTE(review): in the Windows SDK the first member is named dwCallback;
    # it is only ever passed by reference here, so the name is cosmetic.
    _fields_ = [
        ('mciOpenParms', ULONG_PTR),
        ('wDeviceID', UINT),          # filled in by MCI on successful open
        ('lpstrDeviceType', c_wchar_p),
        ('lpstrElementName', c_wchar_p),
        ('lpstrAlias', c_wchar_p)
    ]
class MCI_RECORD_PARMS(Structure):
    '''
    Struct for MCI_RECORD message parameters.
    http://docs.microsoft.com/en-us/windows/desktop/Multimedia/mci-record-parms
    .. versionadded:: 1.4.0
    '''
    # Field order/size must match the Windows ABI layout exactly.
    _fields_ = [
        ('dwCallback', ULONG_PTR),
        ('dwFrom', DWORD),
        ('dwTo', DWORD)
    ]
class MCI_SAVE_PARMS(Structure):
    '''
    Struct for MCI_SAVE message parameters.
    http://docs.microsoft.com/en-us/windows/desktop/Multimedia/mci-save-parms
    .. versionadded:: 1.4.0
    '''
    # Field order/size must match the Windows ABI layout exactly.
    _fields_ = [
        ('dwCallback', ULONG_PTR),
        ('lpfilename', c_wchar_p)   # destination path for the saved WAV
    ]
class MCI_PLAY_PARMS(Structure):
    '''
    Struct for MCI_PLAY message parameters.
    http://docs.microsoft.com/en-us/windows/desktop/Multimedia/mci-play-parms
    .. versionadded:: 1.4.0
    '''
    # Field order/size must match the Windows ABI layout exactly.
    _fields_ = [
        ('dwCallback', ULONG_PTR),
        ('dwFrom', DWORD),
        ('dwTo', DWORD)
    ]
def send_command(device, msg, flags, params):
    '''
    Generic mciSendCommandW() wrapper with error handler.

    All parameters are required as for mciSendCommandW().
    In case of no `params` passed, use `None`, that value
    won't be dereferenced.

    Returns ``params`` because some MCI commands write result values
    (e.g. the opened device ID) into the struct.

    Raises ``Exception(error_code, error_text)`` on failure; the device
    is closed first so it is not left open in an undefined state.

    .. versionadded:: 1.4.0
    '''
    # Local import: the module-level ctypes import list predates this helper.
    from ctypes import create_unicode_buffer

    multimedia = windll.winmm
    send_command_w = multimedia.mciSendCommandW
    get_error = multimedia.mciGetErrorStringW
    # by API specification 128 characters is the documented maximum
    # message length for mciGetErrorStringW
    error_len = 128
    # mciGetErrorStringW is the wide-character variant, so it must be given
    # a wchar buffer (the previous byte buffer relied on stripping NULs).
    error_text = create_unicode_buffer(error_len)
    # send the command to the device
    error_code = send_command_w(
        device,  # device ID
        msg,
        flags,
        # reference to parameters structure or original value
        # in case of params=False/0/None/...
        byref(params) if params else params
    )
    # handle error messages if any
    if error_code:
        # fetch a human-readable message for the error code
        get_error(error_code, byref(error_text), error_len)
        # either it can close already open device or it will fail because
        # the device is in non-closable state, but the end result is the same
        # and it makes no sense to parse MCI_CLOSE's error in this case
        send_command_w(device, MCI_CLOSE, 0, None)
        raise Exception(error_code, error_text.value)
    # return params struct because some commands write into it
    # to pass some values out of the local function scope
    return params
class WinRecorder:
    '''
    Generic wrapper for MCI_RECORD handling the filenames and device closing
    in the same approach like it is used for other platforms.
    .. versionadded:: 1.4.0
    '''
    def __init__(self, device, filename):
        # device: MCI device ID returned by a successful MCI_OPEN
        # filename: destination path for the WAV file written by stop()
        self._device = device
        self._filename = filename
    @property
    def device(self):
        '''
        Public property returning device ID.
        .. versionadded:: 1.4.0
        '''
        return self._device
    @property
    def filename(self):
        '''
        Public property returning filename for current recording.
        .. versionadded:: 1.4.0
        '''
        return self._filename
    def record(self):
        '''
        Start recording a WAV sound.
        Returns immediately; recording continues until stop() is called.
        .. versionadded:: 1.4.0
        '''
        send_command(
            device=self.device,
            msg=MCI_RECORD,
            flags=0,
            params=None
        )
    def stop(self):
        '''
        Stop recording and save the data to a file path
        self.filename. Wait until the file is written.
        Close the device afterwards.
        The MCI command order (STOP, then SAVE with MCI_WAIT, then CLOSE)
        is deliberate and must not be reordered.
        .. versionadded:: 1.4.0
        '''
        # stop the recording first
        send_command(
            device=self.device,
            msg=MCI_STOP,
            flags=MCI_WAIT,
            params=None
        )
        # choose filename for the WAV file
        save_params = MCI_SAVE_PARMS()
        save_params.lpfilename = self.filename
        # save the sound data to a file and wait
        # until it ends writing to the file
        send_command(
            device=self.device,
            msg=MCI_SAVE,
            flags=MCI_SAVE_FILE | MCI_WAIT,
            params=save_params
        )
        # close the recording device
        send_command(
            device=self.device,
            msg=MCI_CLOSE,
            flags=0,
            params=None
        )
class WinPlayer:
    '''
    Generic wrapper for MCI_PLAY handling the device closing.
    .. versionadded:: 1.4.0
    '''
    def __init__(self, device):
        # device: MCI device ID returned by a successful MCI_OPEN
        self._device = device
    @property
    def device(self):
        '''
        Public property returning device ID.
        .. versionadded:: 1.4.0
        '''
        return self._device
    def play(self):
        '''
        Start playing a WAV sound.
        Playback starts from the beginning (dwFrom=0) and runs
        asynchronously; call stop() to end it and release the device.
        .. versionadded:: 1.4.0
        '''
        play_params = MCI_PLAY_PARMS()
        play_params.dwFrom = 0
        send_command(
            device=self.device,
            msg=MCI_PLAY,
            flags=MCI_FROM,
            params=play_params
        )
    def stop(self):
        '''
        Stop playing a WAV sound and close the device.
        .. versionadded:: 1.4.0
        '''
        send_command(
            device=self.device,
            msg=MCI_STOP,
            flags=MCI_WAIT,
            params=None
        )
        # close the playing device
        send_command(
            device=self.device,
            msg=MCI_CLOSE,
            flags=0,
            params=None
        )
class WinAudio(Audio):
    '''
    Windows implementation of audio recording and audio playing.
    .. versionadded:: 1.4.0
    '''
    def __init__(self, file_path=None):
        # default path unless specified otherwise
        default_path = join(
            WinStoragePath().get_music_dir(),
            'audio.wav'
        )
        super().__init__(file_path or default_path)
        # Only one recorder or player device is kept open at a time.
        self._recorder = None
        self._player = None
        # Path of the most recently recorded file (played back in _play).
        self._current_file = None
    def _start(self):
        '''
        Start recording a WAV sound in the background asynchronously.
        .. versionadded:: 1.4.0
        '''
        # clean everything before recording in case
        # there is a different device open
        self._stop()
        # create structure and set device parameters
        open_params = MCI_OPEN_PARMS()
        open_params.lpstrDeviceType = 'waveaudio'
        open_params.lpstrElementName = ''
        # open a new device for recording
        open_params = send_command(
            device=0,  # device ID before opening
            msg=MCI_OPEN,
            # empty filename in lpstrElementName
            # device type in lpstrDeviceType
            flags=MCI_OPEN_ELEMENT | MCI_OPEN_TYPE,
            params=open_params
        )
        # get recorder with device id and path for saving
        self._recorder = WinRecorder(
            device=open_params.wDeviceID,
            filename=self._file_path
        )
        self._recorder.record()
        # Setting the currently recorded file as current file
        # for using it as a parameter in audio player
        self._current_file = self._recorder.filename
    def _stop(self):
        '''
        Stop recording or playing of a WAV sound.
        Safe to call when nothing is active (both handles are None).
        .. versionadded:: 1.4.0
        '''
        if self._recorder:
            self._recorder.stop()
            self._recorder = None
        if self._player:
            self._player.stop()
            self._player = None
    def _play(self):
        '''
        Play a WAV sound from a file. Prioritize latest recorded file before
        default file path from WinAudio.
        .. versionadded:: 1.4.0
        '''
        # create structure and set device parameters
        open_params = MCI_OPEN_PARMS()
        open_params.lpstrDeviceType = 'waveaudio'
        open_params.lpstrElementName = self._current_file or self._file_path
        # open a new device for playing
        open_params = send_command(
            device=0,  # device ID before opening
            msg=MCI_OPEN,
            # existing filename in lpstrElementName
            # device type in lpstrDeviceType
            flags=MCI_OPEN_ELEMENT | MCI_OPEN_TYPE,
            params=open_params
        )
        # get recorder with device id and path for saving
        self._player = WinPlayer(device=open_params.wDeviceID)
        self._player.play()
def instance():
    """Return the Windows audio implementation for the plyer facade proxy."""
    return WinAudio()
| mit | f894d3712e81fc6770e87657f1ddd6e1 | 23.590452 | 79 | 0.580157 | 3.994694 | false | false | false | false |
kivy/plyer | examples/temperature/main.py | 2 | 1794 | from kivy.app import App
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.properties import NumericProperty
from kivy.properties import ObjectProperty
from kivy.uix.boxlayout import BoxLayout
Builder.load_string('''
#:import temperature plyer.temperature
<TemperatureInterface>:
temperature: temperature
orientation: 'vertical'
padding: '50dp'
spacing: '20dp'
BoxLayout:
orientation: 'horizontal'
size_hint_y: 0.3
Button:
id: button_enable
text: 'Enable'
disabled: False
on_release:
root.enable()
button_disable.disabled = not button_disable.disabled
button_enable.disabled = not button_enable.disabled
Button:
id: button_disable
text: 'Disable'
disabled: True
on_release:
root.disable()
button_disable.disabled = not button_disable.disabled
button_enable.disabled = not button_enable.disabled
Label:
text: 'Current air temperature: ' + str(root.temp) + ' degrees C.'
''')
class TemperatureInterface(BoxLayout):
    '''Root Widget.'''
    # Bound to the plyer temperature facade via the KV rule above.
    temperature = ObjectProperty()
    # Latest sensor reading shown in the label (degrees C).
    temp = NumericProperty()
    def enable(self):
        # Start the sensor and poll it 20 times per second.
        self.temperature.enable()
        Clock.schedule_interval(self.get_temperature, 1 / 20.)
    def disable(self):
        self.temperature.disable()
        Clock.unschedule(self.get_temperature)
    def get_temperature(self, dt):
        # Keep the previous value when the facade returns a falsy reading.
        self.temp = self.temperature.temperature or self.temp
class TemperatureApp(App):
    '''Kivy application wrapper for the temperature demo.'''
    def build(self):
        return TemperatureInterface()
    def on_pause(self):
        # Allow the app to pause (mobile) instead of being stopped.
        return True
# Script entry point.
if __name__ == "__main__":
    TemperatureApp().run()
| mit | 3d61017b5a96b2d000e030b2a377f3ce | 25.382353 | 74 | 0.622631 | 4.440594 | false | false | false | false |
kivy/plyer | plyer/utils.py | 1 | 9554 | '''
Utils
=====
'''
__all__ = ('platform', 'reify', 'deprecated')
from os import environ
from os import path
from sys import platform as _sys_platform
import sys
class Platform:
    '''
    Refactored to class to allow module function to be replaced
    with module variable.

    Instances compare equal to the platform name string
    (``'android'``, ``'ios'``, ``'win'``, ``'macosx'``, ``'linux'`` or
    ``'unknown'``) and hash/str like that string.
    '''

    def __init__(self):
        # Detection flags start unresolved and are filled lazily.
        self._platform_ios = None
        self._platform_android = None

    def __eq__(self, other):
        return other == self._get_platform()

    def __ne__(self, other):
        return other != self._get_platform()

    def __str__(self):
        return self._get_platform()

    def __repr__(self):
        return 'platform name: \'{platform}\' from: \n{instance}'.format(
            platform=self._get_platform(),
            instance=super().__repr__()
        )

    def __hash__(self):
        return hash(self._get_platform())

    def _get_platform(self):
        """Detect and cache the platform name, then return it."""
        if self._platform_android is None:
            # sys.getandroidapilevel is defined as of Python 3.7;
            # ANDROID_ARGUMENT is set by the python-for-android bootstrap.
            self._platform_android = (
                hasattr(sys, 'getandroidapilevel')
                or 'ANDROID_ARGUMENT' in environ
            )
        if self._platform_ios is None:
            self._platform_ios = environ.get('KIVY_BUILD', '') == 'ios'
        # Android reports 'linux' in sys.platform, so check the cached
        # flags before falling back to sys.platform inspection.
        if self._platform_android:
            return 'android'
        if self._platform_ios:
            return 'ios'
        if _sys_platform in ('win32', 'cygwin'):
            return 'win'
        if _sys_platform == 'darwin':
            return 'macosx'
        if _sys_platform.startswith('linux'):
            return 'linux'
        return 'unknown'
platform = Platform()
class Proxy:
    '''
    Based on http://code.activestate.com/recipes/496741-object-proxying
    version by Tomer Filiba, PSF license.

    Lazily imports ``plyer.platforms.<platform>.<name>`` on first attribute
    access and forwards everything to its ``instance()``; falls back to the
    bare facade class when the platform module cannot be imported.
    All internal state access goes through ``object.__getattribute__`` /
    ``object.__setattr__`` to avoid recursing into the forwarding logic.
    '''
    __slots__ = ['_obj', '_name', '_facade']
    def __init__(self, name, facade):
        object.__init__(self)
        # _obj stays None until the first attribute access triggers import.
        object.__setattr__(self, '_obj', None)
        object.__setattr__(self, '_name', name)
        object.__setattr__(self, '_facade', facade)
    def _ensure_obj(self):
        # Resolve (and cache) the platform implementation on demand.
        obj = object.__getattribute__(self, '_obj')
        if obj:
            return obj
        # do the import
        try:
            name = object.__getattribute__(self, '_name')
            module = 'plyer.platforms.{}.{}'.format(
                platform, name)
            mod = __import__(module, fromlist='.')
            obj = mod.instance()
        except Exception:
            # Import failed: report it and fall back to the facade base
            # class so attribute access still works (raising NotImplemented).
            import traceback
            traceback.print_exc()
            facade = object.__getattribute__(self, '_facade')
            obj = facade()
        object.__setattr__(self, '_obj', obj)
        return obj
    def __getattribute__(self, name):
        result = None
        # __doc__ is special-cased so help() on the proxy does not trigger
        # the platform import.
        if name == '__doc__':
            return result
        # run _ensure_obj func, result in _obj
        object.__getattribute__(self, '_ensure_obj')()
        # return either Proxy instance or platform-dependent implementation
        result = getattr(object.__getattribute__(self, '_obj'), name)
        return result
    def __delattr__(self, name):
        object.__getattribute__(self, '_ensure_obj')()
        delattr(object.__getattribute__(self, '_obj'), name)
    def __setattr__(self, name, value):
        object.__getattribute__(self, '_ensure_obj')()
        setattr(object.__getattribute__(self, '_obj'), name, value)
    def __bool__(self):
        object.__getattribute__(self, '_ensure_obj')()
        return bool(object.__getattribute__(self, '_obj'))
    def __str__(self):
        object.__getattribute__(self, '_ensure_obj')()
        return str(object.__getattribute__(self, '_obj'))
    def __repr__(self):
        object.__getattribute__(self, '_ensure_obj')()
        return repr(object.__getattribute__(self, '_obj'))
def whereis_exe(program):
    ''' Tries to find the program on the system path.
    Returns the path if it is found or None if it's not found.
    '''
    # PATH entries are ';'-separated on Windows, ':' elsewhere.
    separator = ';' if platform == 'win' else ':'
    for directory in environ.get('PATH', '').split(separator):
        candidate = path.join(directory, program)
        # Must exist and not be a directory of the same name.
        if path.exists(candidate) and not path.isdir(candidate):
            return candidate
    return None
class reify:
    '''
    Non-data descriptor decorator: the wrapped method runs once, and its
    result is stored on the instance under the same name, replacing the
    descriptor for all later accesses.

    It acts like @property, except that the function is only ever called
    once; after that, the value is cached as a regular attribute. This
    gives you lazy attribute creation on objects that are meant to be
    immutable.

    Taken from the `Pyramid project <https://pypi.python.org/pypi/pyramid/>`_.

    To use this as a decorator::

        @reify
        def lazy(self):
            ...
            return hard_to_compute_int
        first_time = self.lazy   # lazy is reify obj, reify.__get__() runs
        second_time = self.lazy  # lazy is hard_to_compute_int
    '''
    def __init__(self, func):
        self.func = func
        self.__doc__ = func.__doc__

    def __get__(self, inst, cls):
        # Accessed on the class itself: return the descriptor unchanged.
        if inst is None:
            return self
        value = self.func(inst)
        # Shadow the descriptor with the computed value on the instance.
        setattr(inst, self.func.__name__, value)
        return value
def deprecated(obj):
    '''
    This is a decorator which can be used to mark functions and classes as
    deprecated. It will result in a warning being emitted when a deprecated
    function is called or a new instance of a class created.

    In case of classes, the warning is emitted before the __new__ method
    of the decorated class is called, therefore a way before the __init__
    method itself.
    '''
    # local imports keep the module namespace clean; they only run when a
    # deprecation is actually declared
    import warnings
    from inspect import stack
    from functools import wraps
    from types import FunctionType, MethodType
    new_obj = None
    # wrap a function into a function emitting a deprecated warning
    if isinstance(obj, FunctionType):
        @wraps(obj)
        def new_func(*args, **kwargs):
            # get the previous stack frame and extract file, line and caller
            # stack() -> caller()
            call_file, call_line, caller = stack()[1][1:4]
            # assemble warning; the warning fires on EVERY call, not only
            # the first one
            warning = (
                'Call to deprecated function {} in {} line {}. '
                'Called from {} line {}'
                ' by {}().\n'.format(
                    obj.__name__,
                    obj.__code__.co_filename,
                    obj.__code__.co_firstlineno + 1,
                    call_file, call_line, caller
                )
            )
            warnings.warn('[{}] {}'.format('WARNING', warning))
            # if there is a docstring present, emit docstring too
            if obj.__doc__:
                warnings.warn(obj.__doc__)
            # return function wrapper
            return obj(*args, **kwargs)
        new_obj = new_func
    # wrap a class into a class emitting a deprecated warning
    # obj is class, type(obj) is metaclass, metaclasses inherit from type
    elif isinstance(type(obj), type):
        # we have an access to the metaclass instance (class) and need to print
        # the warning when a class instance (object) is created with __new__
        # i.e. when calling Class()
        def obj_new(cls, child, *args, **kwargs):
            '''
            Custom metaclass instance's __new__ method with deprecated warning.
            Calls the original __new__ method afterwards.
            '''
            # get the previous stack frame and extract file, line and caller
            # stack() -> caller()
            call_file, call_line, caller = stack()[1][1:4]
            loc_file = obj.__module__
            warnings.warn(
                '[{}] Creating an instance of a deprecated class {} in {}.'
                ' Called from {} line {} by {}().\n'.format(
                    'WARNING', obj.__name__, loc_file,
                    call_file, call_line, caller
                )
            )
            # if there is a docstring present, emit docstring too
            if obj.__doc__:
                warnings.warn(obj.__doc__)
            # make sure nothing silly gets into the function
            assert obj is cls
            # we are creating a __new__ for a class that inherits from
            # a deprecated class, therefore in this particular case
            # MRO is (child, cls, object) > (cls, object)
            if len(child.__mro__) > len(cls.__mro__):
                assert cls is child.__mro__[1], (cls.__mro__, child.__mro__)
            # we are creating __new__ directly for the deprecated class
            # therefore MRO is the same for parent and child class
            elif len(child.__mro__) == len(cls.__mro__):
                assert cls is child
            # return the class back with the extended __new__ method
            # NOTE(review): *args/**kwargs are accepted but not forwarded to
            # __old_new__ -- confirm this is intended (object.__new__ would
            # reject extra positional arguments anyway)
            return obj.__old_new__(child)
        # back up the old __new__ method and create an extended
        # __new__ method that emits deprecated warnings
        obj.__old_new__ = obj.__new__
        obj.__new__ = MethodType(obj_new, obj)
        new_obj = obj
    # return a function wrapper or an extended class
    return new_obj
| mit | 244d5bef37c877a331f723c150e1b4af | 31.944828 | 79 | 0.559661 | 4.386593 | false | false | false | false |
kivy/plyer | plyer/facades/email.py | 1 | 1460 | '''
Email
=====
The :class:`Email` provides access to public methods to use email of your
device.
.. note::
On Android `INTERNET` permission is needed.
Simple Examples
---------------
To send an e-mail::
>>> from plyer import email
>>> recipient = 'abc@gmail.com'
>>> subject = 'Hi'
>>> text = 'This is an example.'
>>> create_chooser = False
>>> email.send(recipient=recipient, subject=subject, text=text,
create_chooser=create_chooser)
>>> # opens email interface where user can change the content.
Supported Platforms
-------------------
Android, iOS, Windows, OS X, Linux
'''
class Email:
    '''
    Email facade.

    Platform backends implement the private ``_send`` hook; this base
    class only defines the public interface.
    '''

    def send(self, recipient=None, subject=None, text=None,
             create_chooser=None):
        '''
        Open an email client message send window, prepopulated with the
        given arguments.

        :param recipient: Recipient of the message (str)
        :param subject: Subject of the message (str)
        :param text: Main body of the message (str)
        :param create_chooser: Whether to display a program chooser to
                               handle the message (bool)

        .. note:: create_chooser is only supported on Android
        '''
        self._send(
            recipient=recipient,
            subject=subject,
            text=text,
            create_chooser=create_chooser,
        )

    # private

    def _send(self, **kwargs):
        # overridden by each platform implementation
        raise NotImplementedError()
| mit | 721933c910cc15a9b3833f616184af9e | 24.172414 | 73 | 0.59726 | 4.294118 | false | false | false | false |
kivy/plyer | plyer/facades/irblaster.py | 1 | 2551 | '''
IrBlaster
============
The :class:`IrBlaster` provides access to public methods by which your device
can act as a remote and could be used to control your TV, AC, Music Player,
Projectors, Set top box or anything that can be controlled by a remote.
.. note::
- On Android your app needs the TRANSMIT_IR permission which allows an
application to use the device's IR transmitter, If available.
Simple Examples
---------------
To get transmit an IR sequence::
>>> from plyer import irblaster
>>> irblaster.transmit(frequency, pattern, mode)
To get frequencies::
>>> irblaster.frequencies
To check if IrBlaster exists::
>>> irblaster.exists()
True/False
Supported Platforms
-------------------
Android
'''
class IrBlaster:
    '''
    Infrared blaster facade.

    Platform backends implement the private ``_get_frequencies``,
    ``_transmit`` and ``_exists`` hooks.
    '''

    @staticmethod
    def periods_to_microseconds(frequency, pattern):
        '''
        Convert a pattern from period counts to microseconds.
        '''
        microseconds_per_period = 1000000. / frequency
        return [microseconds_per_period * count for count in pattern]

    @staticmethod
    def microseconds_to_periods(frequency, pattern):
        '''
        Convert a pattern from microseconds to period counts.
        '''
        microseconds_per_period = 1000000. / frequency
        return [duration / microseconds_per_period for duration in pattern]

    @property
    def frequencies(self):
        '''
        Property which contains a list of frequency ranges
        supported by the device in the form:

            [(from1, to1),
             (from2, to2),
             ...
             (fromN, toN)]
        '''
        return self.get_frequencies()

    def get_frequencies(self):
        return self._get_frequencies()

    def transmit(self, frequency, pattern, mode='period'):
        '''
        Transmit an IR sequence.

        :parameters:
            `frequency`: int
                Carrier frequency for the IR transmission.
            `pattern`: list[int]
                Burst pair pattern to transmit.
            `mode`: str, defaults to 'period'
                Specifies the format of the pattern values.
                Can be 'period' or 'microseconds'.
        '''
        return self._transmit(frequency, pattern, mode)

    def exists(self):
        '''
        Check if the device has an infrared emitter.
        '''
        return self._exists()

    # private

    def _get_frequencies(self):
        raise NotImplementedError()

    def _transmit(self, frequency, pattern, mode):
        raise NotImplementedError()

    def _exists(self):
        raise NotImplementedError()
| mit | d1f3266ac9a41ec65b236c132fb79add | 23.528846 | 77 | 0.594277 | 4.483304 | false | false | false | false |
kivy/plyer | plyer/platforms/macosx/libs/osx_motion_sensor.py | 1 | 3173 | import ctypes
from ctypes import (
Structure, cdll, sizeof,
c_int8, c_int16, c_size_t
)
from ctypes.util import find_library
import platform
# Human-readable messages for the status codes returned by read_sms()
ERROR_DICT = {
    "0": "IOKit Framework not found, is this OSX?",
    "-1": "No SMCMotionSensor service",
    "-2": "No sms device",
    "-3": "Could not open motion sensor device",
    "-4": "Did not receive any coordinates"
}

# Apple's IOKit framework, loaded through ctypes
IOKit = cdll.LoadLibrary(find_library('IOKit'))


class data_structure(Structure):
    # Raw record exchanged with the SMCMotionSensor driver: three signed
    # 16-bit acceleration samples followed by padding.
    _fields_ = [
        ('x', c_int16),
        ('y', c_int16),
        ('z', c_int16),
        ('pad', c_int8 * 34),
    ]


# ctypes aliases mirroring the IOKit / Mach C typedefs
void_p = ctypes.POINTER(ctypes.c_int)
kern_return_t = ctypes.c_int
KERN_SUCCESS = 0
KERN_FUNC = 5  # SMC Motion Sensor on MacBook Pro
mach_port_t = void_p
MACH_PORT_NULL = 0
io_object_t = ctypes.c_int
# NOTE(review): io_object_t is assigned three times; only the final
# void_p value is in effect -- confirm the earlier assignments can go.
io_object_t = ctypes.c_int
io_iterator_t = void_p
io_object_t = void_p
io_connect_t = void_p
IOItemCount = ctypes.c_uint
CFMutableDictionaryRef = void_p
def is_os_64bit():
    '''Return True when the machine architecture name ends in "64".'''
    machine = platform.machine()
    return machine.endswith('64')
def read_sms():
    '''
    Query the SMCMotionSensor IOKit service for a raw accelerometer sample.

    Returns a ``(status, data)`` tuple: ``status`` is 1 on success or a
    negative code from ERROR_DICT on failure, and ``data`` is a populated
    ``data_structure`` (or None on failure).
    '''
    masterPort = mach_port_t()
    result = IOKit.IOMasterPort(MACH_PORT_NULL, ctypes.byref(masterPort))

    IOKit.IOServiceMatching.restype = CFMutableDictionaryRef
    matchingDictionary = IOKit.IOServiceMatching("SMCMotionSensor")

    iterator = io_iterator_t()
    result = IOKit.IOServiceGetMatchingServices(
        masterPort, matchingDictionary,
        ctypes.byref(iterator)
    )
    if result != KERN_SUCCESS:
        # FIX: the previous revision did ``raise ("No coordinates
        # received!")`` here, which raises TypeError on Python 3 (plain
        # strings are not exceptions) and left the return unreachable.
        # Report the documented error code instead.
        return -1, None

    IOKit.IOIteratorNext.restype = io_object_t
    smsDevice = IOKit.IOIteratorNext(iterator)
    if not smsDevice:
        return -2, None

    dataPort = io_connect_t()
    result = IOKit.IOServiceOpen(
        smsDevice, IOKit.mach_task_self(),
        0, ctypes.byref(dataPort)
    )
    if result != KERN_SUCCESS:
        return -3, None

    inStructure = data_structure()
    outStructure = data_structure()

    # 64-bit (and modern 32-bit) systems expose IOConnectCallStructMethod;
    # fall back to the legacy entry point otherwise
    if is_os_64bit() or hasattr(IOKit, 'IOConnectCallStructMethod'):
        structureInSize = IOItemCount(sizeof(data_structure))
        structureOutSize = c_size_t(sizeof(data_structure))
        result = IOKit.IOConnectCallStructMethod(
            dataPort, KERN_FUNC,
            ctypes.byref(inStructure), structureInSize,
            ctypes.byref(outStructure), ctypes.byref(structureOutSize)
        )
    else:
        structureInSize = IOItemCount(sizeof(data_structure))
        structureOutSize = IOItemCount(sizeof(data_structure))
        result = IOKit.IOConnectMethodStructureIStructureO(
            dataPort, KERN_FUNC,
            structureInSize, ctypes.byref(structureOutSize),
            ctypes.byref(inStructure), ctypes.byref(outStructure)
        )

    IOKit.IOServiceClose(dataPort)

    if result != KERN_SUCCESS:
        return -4, None
    return 1, outStructure
def get_coord():
    '''
    Return the current accelerometer reading as an (x, y, z) tuple.

    Raises Exception with a message from ERROR_DICT when IOKit is missing
    or read_sms() reports a failure code.
    '''
    if not IOKit:
        raise Exception(ERROR_DICT["0"])
    ret, data = read_sms()
    if (ret > 0):
        # NOTE(review): a legitimate sample with data.x == 0 is reported
        # as (None, None, None) by this truthiness test -- confirm that a
        # zero x-sample really means "no data available".
        if data.x:
            return (data.x, data.y, data.z)
        else:
            return (None, None, None)
    else:
        raise Exception(ERROR_DICT[str(ret)])
| mit | f3a5dce473139a435ce3931ab53390cf | 23.789063 | 73 | 0.636621 | 3.379127 | false | false | false | false |
uploadcare/pyuploadcare | pyuploadcare/transformations/image.py | 1 | 10674 | from typing import List, Optional, Union
from pyuploadcare.transformations.base import BaseTransformation, StrEnum
class StretchMode(StrEnum):
    '''Values accepted by :meth:`ImageTransformation.stretch`.'''

    on = "on"
    off = "off"
    fill = "fill"
class CropAlignment(StrEnum):
    '''Alignment keywords accepted by :meth:`ImageTransformation.crop`.'''

    center = "center"  # type: ignore
    top = "top"
    right = "right"
    bottom = "bottom"
    left = "left"
class ScaleCropMode(StrEnum):
    '''Smart-crop modes accepted by :meth:`ImageTransformation.scale_crop`.'''

    center = "center"  # type: ignore
    smart = "smart"
    smart_faces_objects = "smart_faces_objects"
    smart_faces_points = "smart_faces_points"
    smart_objects_faces_points = "smart_objects_faces_points"
    smart_objects_faces = "smart_objects_faces"
    smart_objects_points = "smart_objects_points"
    smart_points = "smart_points"
    smart_objects = "smart_objects"
    smart_faces = "smart_faces"
class ImageFormat(StrEnum):
    '''Output formats accepted by :meth:`ImageTransformation.format`.'''

    jpeg = "jpeg"
    png = "png"
    webp = "webp"
    auto = "auto"
class ImageQuality(StrEnum):
    '''Quality presets accepted by :meth:`ImageTransformation.quality`.'''

    normal = "normal"
    better = "better"
    best = "best"
    lighter = "lighter"
    lightest = "lightest"
    smart = "smart"
    smart_retina = "smart_retina"
class Gif2VideoFormat(StrEnum):
    '''Container formats for the gif2video conversion.'''

    mp4 = "mp4"
    webm = "webm"
class Gif2VideoQuality(StrEnum):
    '''Quality presets for the gif2video conversion.'''

    lightest = "lightest"
    lighter = "lighter"
    normal = "normal"
    better = "better"
    best = "best"
class ColorAdjustment(StrEnum):
    '''Adjustable channels accepted by :meth:`ImageTransformation.adjust_color`.'''

    brightness = "brightness"
    exposure = "exposure"
    gamma = "gamma"
    contrast = "contrast"
    saturation = "saturation"
    vibrance = "vibrance"
    warmth = "warmth"
class ImageFilter(StrEnum):
    '''Named filter presets accepted by :meth:`ImageTransformation.filter`.'''

    adaris = "adaris"
    briaril = "briaril"
    calarel = "calarel"
    carris = "carris"
    cynarel = "cynarel"
    cyren = "cyren"
    elmet = "elmet"
    elonni = "elonni"
    enzana = "enzana"
    erydark = "erydark"
    fenralan = "fenralan"
    ferand = "ferand"
    galen = "galen"
    gavin = "gavin"
    gethriel = "gethriel"
    iorill = "iorill"
    iothari = "iothari"
    iselva = "iselva"
    jadis = "jadis"
    lavra = "lavra"
    misiara = "misiara"
    namala = "namala"
    nerion = "nerion"
    nethari = "nethari"
    pamaya = "pamaya"
    sarnar = "sarnar"
    sedis = "sedis"
    sewen = "sewen"
    sorahel = "sorahel"
    sorlen = "sorlen"
    tarian = "tarian"
    thellassan = "thellassan"
    varriel = "varriel"
    varven = "varven"
    vevera = "vevera"
    virkas = "virkas"
    yedis = "yedis"
    yllara = "yllara"
    zatvel = "zatvel"
    zevcen = "zevcen"
class SRGBConversion(StrEnum):
    '''sRGB conversion strategies accepted by :meth:`ImageTransformation.srgb`.'''

    fast = "fast"
    icc = "icc"
    keep_profile = "keep_profile"
class OverlayOffset(StrEnum):
    '''Predefined overlay positions accepted by :meth:`ImageTransformation.overlay`.'''

    left = "left"
    right = "right"
    top = "top"
    bottom = "bottom"
    center = "center"  # type: ignore
class ImageTransformation(BaseTransformation):
    '''
    Builder for Uploadcare CDN image processing operations.

    Every method appends one operation (via ``self.set``) and returns
    ``self``, so calls can be chained; :meth:`path` renders the final CDN
    path for a file.

    Fix over the previous revision: zero-valued offsets and strengths are
    now honoured. ``crop``, ``scale_crop`` and ``blur`` used plain
    truthiness tests, silently dropping valid ``0`` arguments; they now use
    explicit ``is not None`` checks, consistent with ``adjust_color``,
    ``enhance``, ``filter``, ``blur_region`` and ``sharp``.
    '''

    def preview(
        self,
        width: Optional[int] = None,
        height: Optional[int] = None,
    ) -> "ImageTransformation":
        '''Downscale the image to fit into ``width`` x ``height``.'''
        parameters: List[str] = []
        if width or height:
            parameters.append(f'{width or ""}x{height or ""}')
        self.set("preview", parameters)
        return self

    def resize(
        self,
        width: Optional[int] = None,
        height: Optional[int] = None,
    ) -> "ImageTransformation":
        '''Resize the image to ``width`` x ``height`` (either may be omitted).'''
        parameters = [f'{width or ""}x{height or ""}']
        self.set("resize", parameters)
        return self

    def stretch(self, mode: StretchMode) -> "ImageTransformation":
        '''Set the stretch behavior used by resize operations.'''
        self.set("stretch", [mode])
        return self

    def smart_resize(self, width: int, height: int) -> "ImageTransformation":
        '''Content-aware resize to exactly ``width`` x ``height``.'''
        parameters: List[str] = [f"{width}x{height}"]
        self.set("smart_resize", parameters)
        return self

    def crop(
        self,
        width: int,
        height: int,
        offset_x: Optional[Union[int, str]] = None,
        offset_y: Optional[Union[int, str]] = None,
        alignment: Optional[CropAlignment] = None,
    ) -> "ImageTransformation":
        '''Crop a ``width`` x ``height`` region positioned by ``alignment``
        or by explicit offsets.'''
        parameters: List[str] = [f"{width}x{height}"]
        if alignment:
            parameters.append(alignment)
        # ``is not None`` so a zero offset is not silently discarded
        elif offset_x is not None and offset_y is not None:
            parameters.append(f"{offset_x},{offset_y}")
        self.set("crop", parameters)
        return self

    def scale_crop(
        self,
        width: int,
        height: int,
        offset_x_percent: Optional[int] = None,
        offset_y_percent: Optional[int] = None,
        mode: Optional[ScaleCropMode] = None,
    ) -> "ImageTransformation":
        '''Scale down and crop to ``width`` x ``height``; position the crop
        by percent offsets or a smart mode.'''
        parameters: List[str] = [f"{width}x{height}"]
        # ``is not None`` so a 0-percent offset is not silently discarded
        if offset_x_percent is not None and offset_y_percent is not None:
            parameters.append(f"{offset_x_percent}p,{offset_y_percent}p")
        elif mode:
            parameters.append(mode)
        self.set("scale_crop", parameters)
        return self

    def setfill(self, color: str) -> "ImageTransformation":
        '''Set the fill color used with transparent images and cropping.'''
        self.set("setfill", [color])
        return self

    def format(self, image_format: ImageFormat) -> "ImageTransformation":
        '''Convert the image to the given output format.'''
        self.set("format", [image_format])
        return self

    def quality(self, image_quality: ImageQuality) -> "ImageTransformation":
        '''Set the compression quality preset.'''
        self.set("quality", [image_quality])
        return self

    def progressive(self, is_progressive=True) -> "ImageTransformation":
        '''Toggle progressive (interlaced) encoding.'''
        self.set("progressive", ["yes" if is_progressive else "no"])
        return self

    def gif2video(self) -> "ImageTransformation":
        '''Convert an animated GIF into a video (see also :meth:`path`).'''
        self.set("gif2video", [])
        return self

    def gif2video_format(
        self, format: Gif2VideoFormat
    ) -> "ImageTransformation":
        '''Choose the container format for gif2video conversion.'''
        self.set("format", [format])
        return self

    def gif2video_quality(
        self, quality: Gif2VideoQuality
    ) -> "ImageTransformation":
        '''Choose the quality preset for gif2video conversion.'''
        self.set("quality", [quality])
        return self

    def adjust_color(
        self, adjustment: ColorAdjustment, value: Optional[int] = None
    ) -> "ImageTransformation":
        '''Adjust one color channel, optionally by an explicit amount.'''
        parameters: List[str] = []
        if value is not None:
            parameters.append(str(value))
        self.set(str(adjustment), parameters)
        return self

    def enhance(self, value: Optional[int] = None) -> "ImageTransformation":
        '''Auto-enhance the image, optionally with an explicit strength.'''
        parameters: List[str] = []
        if value is not None:
            parameters.append(str(value))
        self.set("enhance", parameters)
        return self

    def grayscale(self) -> "ImageTransformation":
        '''Desaturate the image.'''
        self.set("grayscale", [])
        return self

    def invert(self) -> "ImageTransformation":
        '''Invert image colors.'''
        self.set("invert", [])
        return self

    def filter(
        self, image_filter: ImageFilter, value: Optional[int] = None
    ) -> "ImageTransformation":
        '''Apply a named filter preset, optionally with an intensity.'''
        parameters: List[str] = [image_filter]
        if value is not None:
            parameters.append(str(value))
        self.set("filter", parameters)
        return self

    def srgb(self, conversion: SRGBConversion) -> "ImageTransformation":
        '''Set the sRGB color-profile conversion strategy.'''
        parameters: List[str] = [conversion]
        self.set("srgb", parameters)
        return self

    def max_icc_size(self, threshold: int) -> "ImageTransformation":
        '''Set the ICC profile size threshold (bytes) for srgb handling.'''
        parameters: List[str] = [str(threshold)]
        self.set("max_icc_size", parameters)
        return self

    def blur(
        self, strength: Optional[int] = None, amount: Optional[int] = None
    ) -> "ImageTransformation":
        '''Blur the whole image.

        NOTE: ``amount`` is positional in the CDN syntax -- passing it
        without ``strength`` puts it in the strength slot.
        '''
        parameters: List[str] = []
        # ``is not None`` so an explicit 0 is passed through
        if strength is not None:
            parameters.append(str(strength))
        if amount is not None:
            parameters.append(str(amount))
        self.set("blur", parameters)
        return self

    def blur_region(
        self,
        region_width: Union[str, int],
        region_height: Union[str, int],
        offset_x: Union[str, int],
        offset_y: Union[str, int],
        strength: Optional[int] = None,
    ) -> "ImageTransformation":
        '''Blur a rectangular region of the image.'''
        parameters: List[str] = [
            f"{region_width}x{region_height}",
            f"{offset_x},{offset_y}",
        ]
        if strength is not None:
            parameters.append(str(strength))
        self.set("blur_region", parameters)
        return self

    def blur_faces(
        self, strength: Optional[int] = None
    ) -> "ImageTransformation":
        '''Blur automatically detected faces.'''
        parameters: List[str] = ["faces"]
        if strength is not None:
            parameters.append(str(strength))
        self.set("blur_region", parameters)
        return self

    def sharp(self, strength: Optional[int] = None) -> "ImageTransformation":
        '''Sharpen the image, optionally with an explicit strength.'''
        parameters: List[str] = []
        if strength is not None:
            parameters.append(str(strength))
        self.set("sharp", parameters)
        return self

    def overlay(
        self,
        uuid: str,
        overlay_width: Union[str, int],
        overlay_height: Union[str, int],
        offset: Optional[OverlayOffset] = None,
        offset_x: Optional[Union[str, int]] = None,
        offset_y: Optional[Union[str, int]] = None,
        strength: Optional[int] = None,
    ) -> "ImageTransformation":
        '''Lay another uploaded image (by ``uuid``) over this one.'''
        parameters: List[str] = [
            uuid,
            f"{overlay_width}x{overlay_height}",
        ]
        if offset:
            parameters.append(str(offset))
        else:
            parameters.append(f"{offset_x},{offset_y}")
        if strength is not None:
            parameters.append(f"{strength}p")
        self.set("overlay", parameters)
        return self

    def overlay_self(
        self,
        overlay_width: Union[str, int],
        overlay_height: Union[str, int],
        offset: Optional[OverlayOffset] = None,
        offset_x: Optional[Union[str, int]] = None,
        offset_y: Optional[Union[str, int]] = None,
        strength: Optional[int] = None,
    ) -> "ImageTransformation":
        '''Lay the image over itself (watermark-style).'''
        parameters: List[str] = [
            "self",
            f"{overlay_width}x{overlay_height}",
        ]
        if offset:
            parameters.append(offset)
        else:
            parameters.append(f"{offset_x},{offset_y}")
        if strength is not None:
            parameters.append(f"{strength}p")
        self.set("overlay", parameters)
        return self

    def autorotate(self, enabled=True) -> "ImageTransformation":
        '''Toggle EXIF-based auto-rotation.'''
        parameters: List[str] = ["yes" if enabled else "no"]
        self.set("autorotate", parameters)
        return self

    def rotate(self, angle: int) -> "ImageTransformation":
        '''Rotate the image by ``angle`` degrees.'''
        parameters: List[str] = [str(angle)]
        self.set("rotate", parameters)
        return self

    def flip(self) -> "ImageTransformation":
        '''Flip the image vertically.'''
        self.set("flip", [])
        return self

    def mirror(self) -> "ImageTransformation":
        '''Mirror the image horizontally.'''
        self.set("mirror", [])
        return self

    def path(self, file_id: str) -> str:
        '''Render the CDN path; gif2video uses a bare (non ``/-/``) prefix.'''
        path_ = super().path(file_id)
        path_ = path_.replace("/-/gif2video", "/gif2video")
        return path_
| mit | ccba670124c5253f1c37cafb72c68476 | 26.510309 | 77 | 0.581132 | 3.634321 | false | false | false | false |
kivy/plyer | plyer/facades/gyroscope.py | 1 | 3203 | '''
Gyroscope
============
The gyroscope measures the rate of rotation around a device's x, y,
and z axis.
The :class:`Gyroscope` provides access to public methods to
use gyroscope of your device.
Simple Examples
---------------
To enable gyroscope::
>>> from plyer import gyroscope
>>> gyroscope.enable()
To disable gyroscope::
>>> gyroscope.disable()
To get the rate of rotation along the three axes::
>>> gyroscope.rotation
(-0.0034587313421070576, -0.0073830625042319298, 0.0046892408281564713)
To get the uncalibrated rate of rotation along the three axes along with the
drift compensation::
>>> gyroscope.rotation_uncalib
()
where the first three values show the rate of rotation w/o drift
compensation and the last three show the estimated drift along the three
axes.
Supported Platforms
-------------------
Android, iOS
'''
class Gyroscope:
    '''
    Gyroscope facade.

    Platform backends implement the private ``_enable``, ``_disable``,
    ``_get_orientation`` and ``_get_rotation_uncalib`` hooks; this base
    class only defines the public interface.

    .. versionadded:: 1.3.1
    '''

    @property
    def rotation(self):
        '''
        Rate of rotation around the device's local X, Y and Z axis:

        - x: angular speed around the X axis
        - y: angular speed around the Y axis
        - z: angular speed around the Z axis

        Returns (None, None, None) if no data is currently available.
        '''
        return self.get_orientation()

    @property
    def rotation_uncalib(self):
        '''
        Uncalibrated rate of rotation around the X, Y and Z axis together
        with an estimated drift per axis:

        - x, y, z: angular speed (w/o drift compensation) per axis
        - then: estimated drift around the X, Y and Z axis

        Returns (None, None, None, None, None, None) if no data is
        currently available.
        '''
        return self.get_rotation_uncalib()

    @property
    def orientation(self):
        '''
        WARNING:: This property is deprecated after API Level 8.
        Use `gyroscope.rotation` instead.

        Current gyroscope reading as an (x, y, z) tuple, or
        (None, None, None) if no data is currently available.
        '''
        return self.get_orientation()

    def enable(self):
        '''
        Activate the Gyroscope sensor.
        '''
        self._enable()

    def disable(self):
        '''
        Disable the Gyroscope sensor.
        '''
        self._disable()

    def get_orientation(self):
        return self._get_orientation()

    def get_rotation_uncalib(self):
        return self._get_rotation_uncalib()

    # private

    def _enable(self):
        raise NotImplementedError()

    def _disable(self):
        raise NotImplementedError()

    def _get_orientation(self):
        raise NotImplementedError()

    def _get_rotation_uncalib(self):
        raise NotImplementedError()
| mit | 147d4c24296f1abceb539fa96f5cfd9a | 24.420635 | 78 | 0.632844 | 4.013784 | false | false | false | false |
kivy/plyer | examples/storagepath/main.py | 1 | 1737 | '''
Storage Path Example.
'''
from kivy.lang import Builder
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
Builder.load_string('''
#: import storagepath plyer.storagepath
<StoragePathInterface>:
BoxLayout:
orientation: 'vertical'
BoxLayout:
Button:
text: 'Home'
on_press: label.text = str(storagepath.get_home_dir())
Button:
text: 'External Storage'
on_press:
label.text = str(storagepath.get_external_storage_dir())
BoxLayout:
Button:
text: 'Root'
on_press: label.text = str(storagepath.get_root_dir())
Button:
text: 'Documents'
on_press: label.text = str(storagepath.get_documents_dir())
BoxLayout:
Button:
text: 'Downloads'
on_press: label.text = str(storagepath.get_downloads_dir())
Button:
text: 'Videos'
on_press: label.text = str(storagepath.get_videos_dir())
BoxLayout:
Button:
text: 'Music'
on_press: label.text = str(storagepath.get_music_dir())
Button:
text: 'Pictures'
on_press: label.text = str(storagepath.get_pictures_dir())
Button:
text: 'Applications'
on_press: label.text = str(storagepath.get_application_dir())
Label:
id: label
''')
class StoragePathInterface(BoxLayout):
    '''Root widget; its layout is defined in the kv string above.'''
    pass
class StoragePathApp(App):
    '''Demo application showing the plyer storagepath facade.'''

    def build(self):
        # the interface builds itself from the kv rules
        return StoragePathInterface()
# run the demo only when executed as a script
if __name__ == "__main__":
    StoragePathApp().run()
| mit | a5b4150ee9a7b468bd6af3a61fa180a2 | 27.016129 | 76 | 0.533103 | 4.205811 | false | false | false | false |
kivy/plyer | plyer/platforms/macosx/uniqueid.py | 2 | 1071 | '''
Module of MacOS API for plyer.uniqueid.
'''
from os import environ
from subprocess import Popen, PIPE
from plyer.facades import UniqueID
from plyer.utils import whereis_exe
class OSXUniqueID(UniqueID):
    '''
    Implementation of MacOS uniqueid API.

    Reads the hardware serial number by piping ``ioreg -l`` through
    ``grep IOPlatformSerialNumber``.
    '''

    def _get_uid(self):
        # force a C locale so ioreg's output layout is predictable,
        # remembering the caller's LANG so it can be restored afterwards
        old_lang = environ.get('LANG')
        environ['LANG'] = 'C'
        ioreg_process = Popen(["ioreg", "-l"], stdout=PIPE)
        grep_process = Popen(
            ["grep", "IOPlatformSerialNumber"],
            stdin=ioreg_process.stdout, stdout=PIPE
        )
        # close our handle so ioreg sees SIGPIPE if grep exits early
        ioreg_process.stdout.close()
        output = grep_process.communicate()[0]
        # restore LANG exactly as it was
        if old_lang is None:
            environ.pop('LANG')
        else:
            environ['LANG'] = old_lang
        result = None
        if output:
            # matched line looks like: "IOPlatformSerialNumber" = "XXXX";
            # take the 4th token and strip its surrounding quotes.
            # NOTE(review): ``output`` is bytes (Popen without text mode),
            # so the serial is returned as bytes -- confirm callers expect
            # that.
            result = output.split()[3][1:-1]
        return result
def instance():
    '''
    Instance for facade proxy.

    Returns the MacOS implementation when the ``ioreg`` tool is available
    on PATH, otherwise falls back to the (non-functional) base facade.
    '''
    import sys
    if whereis_exe('ioreg'):
        return OSXUniqueID()
    sys.stderr.write("ioreg not found.")
    return UniqueID()
| mit | 3284db57ef718b109fe048619c25bdb2 | 21.787234 | 59 | 0.578898 | 3.705882 | false | false | false | false |
uploadcare/pyuploadcare | pyuploadcare/api/entities.py | 1 | 3405 | from datetime import datetime
from decimal import Decimal
from enum import Enum
from typing import Dict, List, Optional, Tuple
from uuid import UUID
from pydantic import BaseModel, EmailStr, PrivateAttr
class Entity(BaseModel):
    '''Base class for all Uploadcare REST API response models.'''
    ...


class IDEntity(Entity):
    # entity addressed through an ``id`` field
    id: UUID


class UUIDEntity(Entity):
    # entity addressed through a ``uuid`` field
    uuid: UUID
class Patterns(str, Enum):
    '''Filename pattern tokens usable when copying files to remote storage.'''

    DEFAULT = "${default}"
    AUTO_FILENAME = "${auto_filename}"
    EFFECTS = "${effects}"
    # FIX: was the corrupted literal "$(unknown)"; the Uploadcare pattern
    # syntax token for the original filename is ${filename}
    FILENAME = "${filename}"
    UUID = "${uuid}"
    EXT = "${ext}"
class ColorMode(str, Enum):
    '''Color mode identifiers reported in image info.'''

    RGB = "RGB"
    RGBA = "RGBA"
    RGBa = "RGBa"
    RGBX = "RGBX"
    L = "L"
    LA = "LA"
    La = "La"
    P = "P"
    PA = "PA"
    CMYK = "CMYK"
    YCbCr = "YCbCr"
    HSV = "HSV"
    LAB = "LAB"


class GEOPoint(Entity):
    '''Geographic location as a latitude/longitude pair.'''

    latitude: float
    longitude: float
class ImageInfo(Entity):
    '''Image metadata block of a file info response.'''

    color_mode: ColorMode
    orientation: Optional[int]
    format: str
    sequence: bool
    height: int
    width: int
    geo_location: Optional[GEOPoint]
    datetime_original: Optional[datetime]
    dpi: Optional[Tuple[int, int]]


class AudioStreamInfo(Entity):
    '''Audio stream metadata of a video file.'''

    bitrate: Optional[Decimal]
    codec: Optional[str]
    sample_rate: Optional[Decimal]
    channels: Optional[str]


class VideoStreamInfo(Entity):
    '''Video stream metadata of a video file.'''

    height: Decimal
    width: Decimal
    frame_rate: float
    bitrate: Decimal
    codec: str


class VideoInfo(Entity):
    '''Video metadata block of a file info response.'''

    duration: Decimal
    format: str
    bitrate: Decimal
    audio: Optional[AudioStreamInfo]
    video: VideoStreamInfo
class FileInfo(UUIDEntity):
    '''File resource as returned by the /files/ endpoints.'''

    datetime_removed: Optional[datetime]
    datetime_stored: Optional[datetime]
    datetime_uploaded: Optional[datetime]
    image_info: Optional[ImageInfo]
    is_image: Optional[bool]
    is_ready: Optional[bool]
    mime_type: Optional[str]
    original_file_url: Optional[str]
    original_filename: Optional[str]
    size: Optional[int]
    url: Optional[str]
    variations: Optional[Dict[str, UUID]]
    video_info: Optional[VideoInfo]
    source: Optional[str]
    rekognition_info: Optional[Dict[str, Decimal]]


class GroupInfo(Entity):
    '''File group resource as returned by the /groups/ endpoints.'''

    id: str
    # internal marker: True once full group data has been fetched
    _fetched: Optional[bool] = PrivateAttr(default=False)
    datetime_created: Optional[datetime]
    datetime_stored: Optional[datetime]
    files_count: Optional[int]
    cdn_url: Optional[str]
    url: Optional[str]
    files: Optional[List[Optional[FileInfo]]]
class ColaboratorInfo(Entity):
    '''Project collaborator (class-name spelling kept for compatibility).'''

    email: Optional[EmailStr]
    name: Optional[str]


class ProjectInfo(Entity):
    '''Project resource as returned by the /project/ endpoint.'''

    collaborators: Optional[List[ColaboratorInfo]]
    name: str
    pub_key: str
    autostore_enabled: Optional[bool]


class Webhook(Entity):
    '''Webhook subscription resource.'''

    id: int
    created: Optional[datetime]
    updated: Optional[datetime]
    event: Optional[str]
    target_url: Optional[str]
    project: Optional[str]
    is_active: Optional[bool]
    signing_secret: Optional[str]


class DocumentConvertShortInfo(Entity):
    '''Minimal info about a document conversion result.'''

    uuid: UUID


class DocumentConvertInfo(DocumentConvertShortInfo):
    '''Document conversion job info, including the polling token.'''

    original_source: str
    token: int


class DocumentConvertStatus(Entity):
    '''Status of a document conversion job.'''

    status: str
    error: Optional[str]
    result: DocumentConvertShortInfo


class VideoConvertShortInfo(Entity):
    '''Minimal info about a video conversion result.'''

    uuid: UUID
    thumbnails_group_uuid: str


class VideoConvertInfo(VideoConvertShortInfo):
    '''Video conversion job info, including the polling token.'''

    token: int
    original_source: str


class VideoConvertStatus(Entity):
    '''Status of a video conversion job.'''

    status: str
    error: Optional[str]
    result: VideoConvertShortInfo
| mit | 129f9feeb38414be958e0f409c65cc37 | 19.389222 | 57 | 0.681057 | 3.62234 | false | false | false | false |
kivy/plyer | plyer/platforms/android/gravity.py | 1 | 2078 | '''
Android gravity
---------------------
'''
from jnius import autoclass
from jnius import cast
from jnius import java_method
from jnius import PythonJavaClass
from plyer.facades import Gravity
from plyer.platforms.android import activity
# Android framework classes resolved through pyjnius
Context = autoclass('android.content.Context')
Sensor = autoclass('android.hardware.Sensor')
SensorManager = autoclass('android.hardware.SensorManager')
class GravitySensorListener(PythonJavaClass):
    '''Python-side android.hardware.SensorEventListener that keeps the
    latest gravity vector in ``self.values``.'''

    __javainterfaces__ = ['android/hardware/SensorEventListener']

    def __init__(self):
        super().__init__()
        service = activity.getSystemService(Context.SENSOR_SERVICE)
        self.SensorManager = cast('android.hardware.SensorManager', service)
        self.sensor = self.SensorManager.getDefaultSensor(
            Sensor.TYPE_GRAVITY
        )
        # latest (x, y, z) reading; stays all-None until the first event
        self.values = [None, None, None]

    def enable(self):
        # start receiving sensor events at the default rate
        self.SensorManager.registerListener(
            self,
            self.sensor,
            SensorManager.SENSOR_DELAY_NORMAL
        )

    def disable(self):
        self.SensorManager.unregisterListener(self, self.sensor)

    @java_method('(Landroid/hardware/SensorEvent;)V')
    def onSensorChanged(self, event):
        # keep only the x, y, z components of the event payload
        self.values = event.values[:3]

    @java_method('(Landroid/hardware/Sensor;I)V')
    def onAccuracyChanged(self, sensor, accuracy):
        # accuracy changes are irrelevant for this facade
        pass
class AndroidGravity(Gravity):
    '''Android implementation of the gravity facade.'''

    def __init__(self):
        super().__init__()
        # True while a GravitySensorListener is registered
        self.state = False

    def _enable(self):
        if not self.state:
            self.listener = GravitySensorListener()
            self.listener.enable()
            self.state = True

    def _disable(self):
        if self.state:
            self.state = False
            self.listener.disable()
            del self.listener

    def _get_gravity(self):
        if self.state:
            return tuple(self.listener.values)
        else:
            return (None, None, None)

    def __del__(self):
        if self.state:
            self._disable()
        # FIX: the previous unconditional ``super().__del__()`` raised
        # AttributeError during garbage collection because no parent class
        # defines __del__; delegate only when a finalizer actually exists.
        parent_del = getattr(super(), '__del__', None)
        if parent_del is not None:
            parent_del()
def instance():
    '''Return the Android gravity facade implementation.'''
    return AndroidGravity()
| mit | 2541447dfdf3c4ef59f8a2b76227a5fe | 23.738095 | 76 | 0.62127 | 4.106719 | false | false | false | false |
uploadcare/pyuploadcare | pyuploadcare/api/api.py | 1 | 11929 | import hashlib
import hmac
from time import time
from typing import Any, Dict, Iterable, List, Optional, Union, cast
from uuid import UUID
from httpx._types import RequestFiles
from pyuploadcare.api import entities, responses
from pyuploadcare.api.base import (
API,
CreateMixin,
DeleteMixin,
DeleteWithResponseMixin,
ListCountMixin,
ListMixin,
RetrieveMixin,
UpdateMixin,
)
from pyuploadcare.exceptions import APIError
class FilesAPI(API, ListCountMixin, RetrieveMixin, DeleteWithResponseMixin):
    '''Client for the ``/files/`` REST API resource.'''

    resource_type = "files"
    # maps action name -> pydantic model used to parse the JSON response
    response_classes = {
        "retrieve": entities.FileInfo,
        "list": responses.FileListResponse,
        "count": responses.FileListResponse,
        "store": entities.FileInfo,
        "update": entities.FileInfo,
        "delete": entities.FileInfo,
        "batch_store": responses.BatchFileOperationResponse,
        "batch_delete": responses.BatchFileOperationResponse,
        "local_copy": responses.CreateLocalCopyResponse,
        "remote_copy": responses.CreateRemoteCopyResponse,
    }

    def store(self, file_uuid: Union[UUID, str]) -> entities.FileInfo:
        '''Store a single file permanently (PUT /files/<uuid>/storage/).'''
        url = self._build_url(file_uuid, suffix="storage")
        response_class = self._get_response_class("store")
        json_response = self._client.put(url).json()
        response = self._parse_response(json_response, response_class)
        return cast(entities.FileInfo, response)

    def batch_store(
        self, file_uuids: Iterable[Union[UUID, str]]
    ) -> responses.BatchFileOperationResponse:
        '''Store several files in one request.'''
        url = self._build_url(suffix="storage")
        response_class = self._get_response_class("batch_store")
        json_response = self._client.put(url, json=file_uuids).json()
        response = self._parse_response(json_response, response_class)
        return cast(responses.BatchFileOperationResponse, response)

    def batch_delete(
        self, file_uuids: Iterable
    ) -> responses.BatchFileOperationResponse:
        '''Delete several files in one request.'''
        url = self._build_url(suffix="storage")
        response_class = self._get_response_class("batch_delete")
        # DELETE with a JSON body requires the client's special helper
        json_response = self._client.delete_with_payload(
            url, json=file_uuids
        ).json()
        response = self._parse_response(json_response, response_class)
        return cast(responses.BatchFileOperationResponse, response)

    def local_copy(
        self, source: Union[UUID, str], store: bool = False
    ) -> responses.CreateLocalCopyResponse:
        '''Copy a file within the Uploadcare storage.'''
        url = self._build_url(suffix="local_copy")
        data = {"source": source, "store": store}
        response_class = self._get_response_class("local_copy")
        json_response = self._client.post(url, json=data).json()
        response = self._parse_response(json_response, response_class)
        return cast(responses.CreateLocalCopyResponse, response)

    def remote_copy(
        self,
        source: Union[UUID, str],
        target: str,
        make_public: bool = True,
        pattern: str = "${default}",
    ) -> responses.CreateRemoteCopyResponse:
        '''Copy a file to the custom (remote) storage named ``target``.'''
        url = self._build_url(suffix="remote_copy")
        data = {
            "source": source,
            "target": target,
            "make_public": make_public,
            "pattern": pattern,
        }
        response_class = self._get_response_class("remote_copy")
        json_response = self._client.post(url, json=data).json()
        response = self._parse_response(json_response, response_class)
        return cast(responses.CreateRemoteCopyResponse, response)
class GroupsAPI(API, ListCountMixin, RetrieveMixin):
    '''Client for the ``/groups/`` REST API resource.'''

    resource_type = "groups"
    entity_class = entities.GroupInfo

    response_classes = {
        "retrieve": entities.GroupInfo,
        "list": responses.GroupListResponse,
        "count": responses.GroupListResponse,
    }

    def store(self, file_uuid: Union[UUID, str]) -> Dict[str, Any]:
        '''Store all files of a group (PUT /groups/<uuid>/storage/);
        returns the raw JSON response.'''
        url = self._build_url(file_uuid, suffix="storage")
        return self._client.put(url).json()
class ProjectAPI(API, RetrieveMixin):
    '''Client for the ``/project/`` REST API resource (read-only).'''

    resource_type = "project"
    entity_class = entities.ProjectInfo

    response_classes = {
        "retrieve": entities.ProjectInfo,
    }
class WebhooksAPI(API, CreateMixin, ListMixin, UpdateMixin, DeleteMixin):
    """REST API for webhooks: create/list/update/delete via the mixins."""

    # Resource path segment and (de)serialization classes used by the base API.
    resource_type = "webhooks"
    entity_class = entities.Webhook
    response_classes = {
        "create": entities.Webhook,
        "list": List[entities.Webhook],  # type: ignore
        "update": entities.Webhook,
    }
class DocumentConvertAPI(API):
    """API endpoints for submitting and polling document-conversion jobs."""

    resource_type = "convert/document"
    entity_class = entities.DocumentConvertInfo
    response_classes = {
        "convert": responses.DocumentConvertResponse,
        "status": entities.DocumentConvertStatus,
    }

    def convert(
        self,
        paths: List[str],
        store: Optional[bool] = None,
    ) -> responses.DocumentConvertResponse:
        """Submit conversion jobs for the given transformation paths."""
        endpoint = self._build_url()
        payload = {"paths": paths}
        if isinstance(store, bool):
            # The API expects the literal strings "true"/"false".
            payload["store"] = str(store).lower()  # type: ignore
        response_cls = self._get_response_class("convert")
        raw = self._client.post(endpoint, json=payload).json()
        parsed = self._parse_response(raw, response_cls)
        return cast(responses.DocumentConvertResponse, parsed)

    def status(self, token: int) -> entities.DocumentConvertStatus:
        """Fetch the status of a previously submitted conversion job."""
        endpoint = self._build_url(suffix=f"status/{token}")
        response_cls = self._get_response_class("status")
        raw = self._client.get(endpoint).json()
        parsed = self._parse_response(raw, response_cls)
        return cast(entities.DocumentConvertStatus, parsed)
class VideoConvertAPI(API):
    """API endpoints for submitting and polling video-conversion jobs."""

    resource_type = "convert/video"
    entity_class = entities.VideoConvertInfo
    response_classes = {
        "convert": responses.VideoConvertResponse,
        "status": entities.VideoConvertStatus,
    }

    def convert(
        self,
        paths: List[str],
        store: Optional[bool] = None,
    ) -> responses.VideoConvertResponse:
        """Submit conversion jobs for the given transformation paths."""
        endpoint = self._build_url()
        payload = {"paths": paths}
        if isinstance(store, bool):
            # The API expects the literal strings "true"/"false".
            payload["store"] = str(store).lower()  # type: ignore
        response_cls = self._get_response_class("convert")
        raw = self._client.post(endpoint, json=payload).json()
        parsed = self._parse_response(raw, response_cls)
        return cast(responses.VideoConvertResponse, parsed)

    def status(self, token: int) -> entities.VideoConvertStatus:
        """Fetch the status of a previously submitted conversion job."""
        endpoint = self._build_url(suffix=f"status/{token}")
        response_cls = self._get_response_class("status")
        raw = self._client.get(endpoint).json()
        parsed = self._parse_response(raw, response_cls)
        return cast(entities.VideoConvertStatus, parsed)
class UploadAPI(API):
    """Endpoints of the upload service: direct upload, multipart upload,
    upload from URL, and group creation.

    "Secure" (signed) uploads attach an ``expire`` timestamp and an
    HMAC-SHA256 ``signature`` to the request payload; that logic is shared
    by :meth:`_sign_payload`.
    """

    resource_type = "base"

    @staticmethod
    def _generate_secure_signature(secret: str, expire: int):
        """Return ``HMAC-SHA256(secret, str(expire))`` as a hex digest."""
        return hmac.new(
            secret.encode("utf-8"), str(expire).encode("utf-8"), hashlib.sha256
        ).hexdigest()

    def _sign_payload(
        self,
        data: Dict[str, Any],
        expire: Optional[int],
        secret_key: Optional[str] = None,
    ) -> None:
        """Add ``expire``/``signature`` entries to ``data`` in place.

        ``expire`` defaults to now + ``self.signed_uploads_ttl`` and
        ``secret_key`` defaults to ``self.secret_key``.  Extracted here
        because four upload methods previously duplicated this logic.
        """
        if secret_key is None:
            secret_key = self.secret_key
        if expire is None:
            expire = int(time()) + self.signed_uploads_ttl
        data["expire"] = str(expire)
        data["signature"] = self._generate_secure_signature(
            secret_key, expire  # type: ignore
        )

    def upload(
        self,
        files: RequestFiles,
        secure_upload: bool = False,
        public_key: Optional[str] = None,
        secret_key: Optional[str] = None,
        store: Optional[str] = "auto",
        expire: Optional[int] = None,
    ) -> Dict[str, Any]:
        """Upload ``files`` directly and return the raw JSON response."""
        data = {}
        data["UPLOADCARE_STORE"] = store
        if public_key is None:
            public_key = self.public_key
        data["UPLOADCARE_PUB_KEY"] = public_key
        if secure_upload:
            self._sign_payload(data, expire, secret_key)
        url = self._build_url()
        document = self._client.post(url, data=data, files=files)
        return document.json()

    def start_multipart_upload(
        self,
        file_name: str,
        file_size: int,
        content_type: str,
        store: Optional[str] = None,
        secure_upload: bool = False,
        expire: Optional[int] = None,
    ):
        """Initiate a multipart upload and return the raw JSON response."""
        data = {
            "filename": file_name,
            "size": str(file_size),
            "content_type": content_type,
            "UPLOADCARE_PUB_KEY": self.public_key,
        }
        if store is not None:
            data["UPLOADCARE_STORE"] = store
        if secure_upload:
            self._sign_payload(data, expire)
        url = self._build_url(base="multipart/start")
        document = self._client.post(url, data=data)
        return document.json()

    def multipart_upload_chunk(self, url: str, chunk: bytes):
        """PUT one raw chunk of a multipart upload to its part URL."""
        document = self._client.put(
            url,
            content=chunk,
            headers={"Content-Type": "application/octet-stream"},
        )
        return document.content

    def multipart_complete(self, uuid: UUID):
        """Finalize a multipart upload for ``uuid``; return the JSON reply."""
        data = {
            "uuid": str(uuid),
            "UPLOADCARE_PUB_KEY": self.public_key,
        }
        url = self._build_url(base="multipart/complete")
        document = self._client.post(url, data=data)
        return document.json()

    def upload_from_url(
        self,
        source_url,
        store="auto",
        filename=None,
        secure_upload: bool = False,
        expire: Optional[int] = None,
    ) -> str:
        """Ask the service to fetch ``source_url``; return the polling token.

        Raises:
            APIError: if the response does not contain a ``token``.
        """
        data = {
            "source_url": source_url,
            "store": store,
            "pub_key": self.public_key,
        }
        if filename:
            data["filename"] = filename
        if secure_upload:
            self._sign_payload(data, expire)
        url = self._build_url(base="/from_url")
        document = self._client.post(url, data=data)
        response = document.json()
        if "token" not in response:
            raise APIError(f"could not find token in result: {response}")
        return response["token"]

    def get_upload_from_url_status(self, token: str) -> Dict[str, Any]:
        """Poll the status of an upload-from-URL job by its ``token``.

        Raises:
            APIError: if the response does not contain a ``status``.
        """
        query_parameters = {
            "token": token,
        }
        url = self._build_url(
            base="/from_url/status", query_parameters=query_parameters
        )
        document = self._client.get(url)
        response = document.json()
        if "status" not in response:
            raise APIError(f"could not find status in result: {response}")
        return response

    def create_group(
        self,
        files: Iterable[Union[str, UUID]],
        secure_upload: bool = False,
        expire: Optional[int] = None,
    ):
        """Create a file group from the given files; return the JSON reply."""
        data = {
            "pub_key": self.public_key,
        }
        # The endpoint expects indexed form fields: files[0], files[1], ...
        for index, file in enumerate(files):
            data[f"files[{index}]"] = file  # type: ignore
        if secure_upload:
            self._sign_payload(data, expire)
        url = self._build_url(base="/group/")
        document = self._client.post(url, data=data)
        return document.json()
| mit | d4622c4ddbcb8fdc5a560716f9af8050 | 31.504087 | 91 | 0.592003 | 4.093686 | false | false | false | false |
kivy/plyer | plyer/platforms/win/libs/balloontip.py | 1 | 6418 | # -- coding: utf-8 --
'''
Module of Windows API for creating taskbar balloon tip
notification in the taskbar's tray notification area.
'''
__all__ = ('WindowsBalloonTip', 'balloon_tip')
import time
import ctypes
import atexit
from threading import RLock
from plyer.platforms.win.libs import win_api_defs
WS_OVERLAPPED = 0x00000000
WS_SYSMENU = 0x00080000
WM_DESTROY = 2
CW_USEDEFAULT = 8
LR_LOADFROMFILE = 16
LR_DEFAULTSIZE = 0x0040
IDI_APPLICATION = 32512
IMAGE_ICON = 1
NOTIFYICON_VERSION_4 = 4
NIM_ADD = 0
NIM_MODIFY = 1
NIM_DELETE = 2
NIM_SETVERSION = 4
NIF_MESSAGE = 1
NIF_ICON = 2
NIF_TIP = 4
NIF_INFO = 0x10
NIIF_USER = 4
NIIF_LARGE_ICON = 0x20
class WindowsBalloonTip:
    '''
    Implementation of balloon tip notifications through Windows API.

    * Register Window class name:
      https://msdn.microsoft.com/en-us/library/windows/desktop/ms632596.aspx
    * Create an overlapped window using the registered class.
      - It's hidden everywhere in GUI unless ShowWindow(handle, SW_SHOW)
        function is called.
    * Show/remove a tray icon and a balloon tip notification.

    Each instance is a separate notification with different parameters.
    Can be used with Threads.
    '''

    # Win32 resources owned by this instance; released in __del__.
    _class_atom = 0
    _wnd_class_ex = None
    _hwnd = None
    _hicon = None
    _balloon_icon = None
    _notify_data = None

    # Class-wide counter (guarded by _lock) used to build unique
    # window-class names across instances and threads.
    _count = 0
    _lock = RLock()

    @staticmethod
    def _get_unique_id():
        '''
        Keep track of each created balloon tip notification names,
        so that they can be easily identified even from outside.

        Make sure the count is shared between all the instances
        i.e. use a lock, so that _count class variable is incremented
        safely when using balloon tip notifications with Threads.
        '''
        WindowsBalloonTip._lock.acquire()
        val = WindowsBalloonTip._count
        WindowsBalloonTip._count += 1
        WindowsBalloonTip._lock.release()
        return val

    def __init__(self, title, message, app_name, app_icon='',
                 timeout=10, **kwargs):
        '''
        The app_icon parameter, if given, is an .ICO file.

        Registers a window class, creates a hidden overlapped window, loads
        the icon (or falls back to the stock application icon), shows the
        notification, and then blocks for ``timeout`` seconds.
        '''
        # Ensure resources are freed even if the interpreter exits early.
        atexit.register(self.__del__)

        wnd_class_ex = win_api_defs.get_WNDCLASSEXW()
        class_name = 'PlyerTaskbar' + str(WindowsBalloonTip._get_unique_id())
        wnd_class_ex.lpszClassName = class_name

        # keep ref to it as long as window is alive
        wnd_class_ex.lpfnWndProc = win_api_defs.WindowProc(
            win_api_defs.DefWindowProcW
        )
        wnd_class_ex.hInstance = win_api_defs.GetModuleHandleW(None)
        if wnd_class_ex.hInstance is None:
            raise Exception('Could not get windows module instance.')
        class_atom = win_api_defs.RegisterClassExW(wnd_class_ex)
        if class_atom == 0:
            raise Exception('Could not register the PlyerTaskbar class.')
        self._class_atom = class_atom
        self._wnd_class_ex = wnd_class_ex

        # create window
        self._hwnd = win_api_defs.CreateWindowExW(
            # dwExStyle, lpClassName, lpWindowName, dwStyle
            0, class_atom, '', WS_OVERLAPPED,
            # x, y, nWidth, nHeight
            0, 0, CW_USEDEFAULT, CW_USEDEFAULT,
            # hWndParent, hMenu, hInstance, lpParam
            None, None, wnd_class_ex.hInstance, None
        )
        if self._hwnd is None:
            raise Exception('Could not get create window.')
        win_api_defs.UpdateWindow(self._hwnd)

        # load .ICO file for as balloon tip and tray icon
        if app_icon:
            icon_flags = LR_LOADFROMFILE | LR_DEFAULTSIZE
            hicon = win_api_defs.LoadImageW(
                None, app_icon, IMAGE_ICON, 0, 0, icon_flags
            )
            if hicon is None:
                raise Exception('Could not load icon {}'.format(app_icon))
            self._balloon_icon = self._hicon = hicon
        else:
            # Stock application icon (no custom balloon icon in this case).
            self._hicon = win_api_defs.LoadIconW(
                None,
                ctypes.cast(IDI_APPLICATION, win_api_defs.LPCWSTR)
            )

        # show the notification
        self.notify(title, message, app_name)
        if timeout:
            time.sleep(timeout)

    def __del__(self):
        '''
        Clean visible parts of the notification object, then free all
        resources allocated for creating the notification Window and icon.
        '''
        self.remove_notify()
        if self._hicon is not None:
            win_api_defs.DestroyIcon(self._hicon)
        if self._wnd_class_ex is not None:
            win_api_defs.UnregisterClassW(
                self._class_atom,
                self._wnd_class_ex.hInstance
            )
        if self._hwnd is not None:
            win_api_defs.DestroyWindow(self._hwnd)

    def notify(self, title, message, app_name):
        '''
        Displays a balloon in the systray. Can be called multiple times
        with different parameter values.
        '''
        # remove previous visible balloon tip nofitication if available
        self.remove_notify()

        # add icon and messages to window
        hicon = self._hicon
        flags = NIF_TIP | NIF_INFO
        icon_flag = 0
        if hicon is not None:
            flags |= NIF_ICON
            # if icon is default app's one, don't display it in message
            if self._balloon_icon is not None:
                icon_flag = NIIF_USER | NIIF_LARGE_ICON
        notify_data = win_api_defs.get_NOTIFYICONDATAW(
            0, self._hwnd,
            id(self), flags, 0, hicon, app_name, 0, 0, message,
            NOTIFYICON_VERSION_4, title, icon_flag, win_api_defs.GUID(),
            self._balloon_icon
        )
        self._notify_data = notify_data
        # NIM_ADD shows the tray icon/balloon; NIM_SETVERSION opts in to
        # the NOTIFYICON_VERSION_4 message behavior.
        if not win_api_defs.Shell_NotifyIconW(NIM_ADD, notify_data):
            raise Exception('Shell_NotifyIconW failed.')
        if not win_api_defs.Shell_NotifyIconW(NIM_SETVERSION,
                                              notify_data):
            raise Exception('Shell_NotifyIconW failed.')

    def remove_notify(self):
        '''
        Removes the notify balloon, if displayed.
        '''
        if self._notify_data is not None:
            win_api_defs.Shell_NotifyIconW(NIM_DELETE, self._notify_data)
            self._notify_data = None
def balloon_tip(**kwargs):
    '''
    Show a one-shot balloon tip notification.

    Constructing a :class:`WindowsBalloonTip` displays the notification
    (and blocks for its ``timeout``) as a side effect; the instance is
    discarded afterwards.
    '''
    WindowsBalloonTip(**kwargs)
| mit | da267babf0528320e71260bcd25c2d25 | 30.15534 | 79 | 0.603303 | 3.722738 | false | false | false | false |
uploadcare/pyuploadcare | pyuploadcare/transformations/base.py | 1 | 1305 | from enum import Enum
from typing import List, Optional, Union
class StrEnum(str, Enum):
    """String-valued enum whose ``str()`` is the member's raw value."""

    def __str__(self) -> str:
        return self.value
class BaseTransformation:
def __init__(
self, transformation: Optional[Union[str, "BaseTransformation"]] = None
):
if isinstance(transformation, BaseTransformation):
transformation = transformation.effects
self._effects = []
if transformation:
transformation = transformation.rstrip("/") # type: ignore
self._effects.append(transformation)
def set(
self, transformation_name: str, parameters: List[str]
) -> "BaseTransformation":
effect = transformation_name
if parameters:
effect += "/" + "/".join(parameters)
self._effects.append(effect)
return self
def _prefix(self, file_id: str) -> str:
return f"{file_id}/"
def __str__(self):
return self.effects
@property
def effects(self):
effects_ = "/-/".join(self._effects)
if effects_:
effects_ += "/"
return effects_
def path(self, file_id: str) -> str:
path_ = self._prefix(file_id)
effects = self.effects
if effects:
path_ += "-/" + effects
return path_
| mit | 25b54d82253640b39cad54b5033d6a13 | 24.588235 | 79 | 0.571648 | 4.453925 | false | false | false | false |
kivy/plyer | plyer/platforms/macosx/wifi.py | 2 | 5218 | from pyobjus import autoclass
from pyobjus.dylib_manager import load_framework, INCLUDE
from plyer.facades import Wifi
load_framework(INCLUDE.Foundation)
load_framework(INCLUDE.CoreWLAN)
CWInterface = autoclass('CWInterface')
CWNetwork = autoclass('CWNetwork')
CWWiFiClient = autoclass('CWWiFiClient')
NSArray = autoclass('NSArray')
NSDictionary = autoclass('NSDictionary')
NSString = autoclass('NSString')
class OSXWifi(Wifi):
    '''
    Wifi facade implementation for macOS, backed by CoreWLAN (pyobjus).

    ``names`` maps each discovered SSID to its CWNetwork scan object.
    '''

    names = {}

    # CWNetwork attributes copied verbatim into the info dict returned by
    # _get_network_info; the dict keys equal the attribute names.  (bssid
    # and ssid are handled separately because they need UTF8String().)
    _NETWORK_ATTRIBUTES = (
        'accessNetworkType',
        'aggregateRSSI',
        'beaconInterval',
        'countryCode',
        'hasInternet',
        'hasInterworkingIE',
        'hessid',
        'ibss',
        'isAdditionalStepRequiredForAccess',
        'isCarPlayNetwork',
        'isEmergencyServicesReachable',
        'isPasspoint',
        'isPersonalHotspot',
        'isUnauthenticatedEmergencyServiceAccessible',
        'noiseMeasurement',
        'physicalLayerMode',
        'rssiValue',
        'securityType',
        'supportsEasyConnect',
        'supportsWPS',
        'venueGroup',
        'venueType',
    )

    def _is_enabled(self):
        '''
        Returns `True` if the Wifi is enabled else returns `False`.
        '''
        return CWWiFiClient.sharedWiFiClient().interface().powerOn()

    def _get_network_info(self, name):
        '''
        Returns all the network information for the named network.

        Previously this built ~25 individual local variables; the plain
        attribute copies are now collected in one pass over
        _NETWORK_ATTRIBUTES (same keys, same values).
        '''
        network = self.names[name]
        info = {
            attribute: getattr(network, attribute)
            for attribute in self._NETWORK_ATTRIBUTES
        }
        # NSString values must be decoded into Python strings.
        info['bssid'] = network.bssid.UTF8String()
        info['ssid'] = network.ssid.UTF8String()
        return info

    def _start_scanning(self):
        '''
        Starts scanning for available Wi-Fi networks.
        '''
        if not self._is_enabled():
            raise Exception("Wifi not enabled.")
        self.names = {}
        interface = CWInterface.interface()
        scan = interface.scanForNetworksWithName_error_(None, None)
        results = scan.allObjects()
        for index in range(results.count()):
            network = results.objectAtIndex_(index)
            self.names[network.ssid.UTF8String()] = network

    def _get_available_wifi(self):
        '''
        Returns the name of available networks.
        '''
        return self.names.keys()

    def _connect(self, network, parameters):
        '''
        Expects 2 parameters:
            - name/ssid of the network.
            - parameters: dict containing the 'password' key.
        '''
        password = parameters['password']
        network_object = self.names[network]
        CWInterface.interface().associateToNetwork_password_error_(
            network_object,
            password,
            None)
        return

    def _disconnect(self):
        '''
        Disconnect from network.
        '''
        CWInterface.interface().disassociate()
        return

    def _disable(self):
        '''
        Wifi interface power state is set to "OFF".
        '''
        interface = CWWiFiClient.sharedWiFiClient().interface()
        interface.setPower_error_(False, None)

    def _enable(self):
        '''
        Wifi interface power state is set to "ON".
        '''
        interface = CWWiFiClient.sharedWiFiClient().interface()
        interface.setPower_error_(True, None)
def instance():
    '''Return the macOS implementation of the Wifi facade.'''
    return OSXWifi()
| mit | 93dfc32a1631d0c09ba950240395ba56 | 34.496599 | 77 | 0.613645 | 4.21827 | false | false | false | false |
kivy/plyer | plyer/platforms/android/vibrator.py | 1 | 1844 | """Implementation Vibrator for Android."""
from jnius import autoclass, cast
from plyer.facades import Vibrator
from plyer.platforms.android import activity
from plyer.platforms.android import SDK_INT
Context = autoclass("android.content.Context")
vibrator_service = activity.getSystemService(Context.VIBRATOR_SERVICE)
vibrator = cast("android.os.Vibrator", vibrator_service)
if SDK_INT >= 26:
VibrationEffect = autoclass("android.os.VibrationEffect")
class AndroidVibrator(Vibrator):
    """Android Vibrator class.

    Supported features:
        * vibrate for some period of time.
        * vibrate from given pattern.
        * cancel vibration.
        * check whether Vibrator exists.
    """

    def _vibrate(self, time=None, **kwargs):
        # Vibrate once for ``time`` seconds (the platform API takes ms).
        if vibrator:
            if SDK_INT >= 26:
                # API 26+ requires a VibrationEffect object.
                vibrator.vibrate(
                    VibrationEffect.createOneShot(
                        int(1000 * time), VibrationEffect.DEFAULT_AMPLITUDE
                    )
                )
            else:
                vibrator.vibrate(int(1000 * time))

    def _pattern(self, pattern=None, repeat=None, **kwargs):
        # Convert the pattern from seconds to the millisecond waveform
        # the platform API expects.
        pattern = [int(1000 * time) for time in pattern]
        if vibrator:
            if SDK_INT >= 26:
                vibrator.vibrate(
                    VibrationEffect.createWaveform(pattern, repeat)
                )
            else:
                vibrator.vibrate(pattern, repeat)

    def _exists(self, **kwargs):
        # hasVibrator() is only available from API level 11 onwards.
        if SDK_INT >= 11:
            return vibrator.hasVibrator()
        elif vibrator_service is None:
            raise NotImplementedError()
        return True

    def _cancel(self, **kwargs):
        # Stop any ongoing vibration.
        vibrator.cancel()
def instance():
    """Returns Vibrator with android features.

    :return: instance of class AndroidVibrator
    """
    return AndroidVibrator()
| mit | e88b6705540601d886a53e2f13afd96a | 28.269841 | 75 | 0.605206 | 3.882105 | false | false | false | false |
kivy/plyer | plyer/__init__.py | 1 | 4074 | '''
Plyer
=====
'''
__all__ = (
'accelerometer', 'audio', 'barometer', 'battery', 'bluetooth',
'brightness', 'call', 'camera', 'compass', 'cpu', 'email', 'filechooser',
'flash', 'gps', 'gravity', 'gyroscope', 'humidity', 'irblaster',
'keystore', 'light', 'notification', 'orientation', 'processors',
'proximity', 'screenshot', 'sms', 'spatialorientation', 'storagepath',
'stt', 'temperature', 'tts', 'uniqueid', 'vibrator', 'wifi', 'devicename'
)
__version__ = '2.2.0.dev0'
from plyer import facades
from plyer.utils import Proxy
#: Accelerometer proxy to :class:`plyer.facades.Accelerometer`
accelerometer = Proxy('accelerometer', facades.Accelerometer)
#: Keystore proxy to :class:`plyer.facades.Keystore`
keystore = Proxy('keystore', facades.Keystore)
#: Audio proxy to :class:`plyer.facades.Audio`
audio = Proxy('audio', facades.Audio)
#: Barometer proxy to :class:`plyer.facades.Barometer`
barometer = Proxy('barometer', facades.Barometer)
#: Battery proxy to :class:`plyer.facades.Battery`
battery = Proxy('battery', facades.Battery)
#: Call proxy to :class `plyer.facades.Call`
call = Proxy('call', facades.Call)
#: Compass proxy to :class:`plyer.facades.Compass`
compass = Proxy('compass', facades.Compass)
#: Camera proxy to :class:`plyer.facades.Camera`
camera = Proxy('camera', facades.Camera)
#: Email proxy to :class:`plyer.facades.Email`
email = Proxy('email', facades.Email)
#: FileChooser proxy to :class:`plyer.facades.FileChooser`
filechooser = Proxy('filechooser', facades.FileChooser)
#: GPS proxy to :class:`plyer.facades.GPS`
gps = Proxy('gps', facades.GPS)
#: Gravity proxy to :class:`plyer.facades.Gravity`
gravity = Proxy('gravity', facades.Gravity)
#: Gyroscope proxy to :class:`plyer.facades.Gyroscope`
gyroscope = Proxy('gyroscope', facades.Gyroscope)
#: IrBlaster proxy to :class:`plyer.facades.IrBlaster`
irblaster = Proxy('irblaster', facades.IrBlaster)
#: Light proxy to :class:`plyer.facades.Light`
light = Proxy('light', facades.Light)
#: Orientation proxy to :class:`plyer.facades.Orientation`
orientation = Proxy('orientation', facades.Orientation)
#: Notification proxy to :class:`plyer.facades.Notification`
notification = Proxy('notification', facades.Notification)
#: Proximity proxy to :class:`plyer.facades.Proximity`
proximity = Proxy('proximity', facades.Proximity)
#: Sms proxy to :class:`plyer.facades.Sms`
sms = Proxy('sms', facades.Sms)
#: Speech proxy to :class:`plyer.facades.STT`
stt = Proxy('stt', facades.STT)
#: TTS proxy to :class:`plyer.facades.TTS`
tts = Proxy('tts', facades.TTS)
#: UniqueID proxy to :class:`plyer.facades.UniqueID`
uniqueid = Proxy('uniqueid', facades.UniqueID)
#: Vibrator proxy to :class:`plyer.facades.Vibrator`
vibrator = Proxy('vibrator', facades.Vibrator)
#: Flash proxy to :class:`plyer.facades.Flash`
flash = Proxy('flash', facades.Flash)
#: Wifi proxy to :class:`plyer.facades.Wifi`
wifi = Proxy('wifi', facades.Wifi)
#: Temperature proxy to :class:`plyer.facades.Temperature`
temperature = Proxy('temperature', facades.Temperature)
#: Humidity proxy to :class:`plyer.facades.Humidity`
humidity = Proxy('humidity', facades.Humidity)
#: SpatialOrientation proxy to :class:`plyer.facades.SpatialOrientation`
spatialorientation = Proxy('spatialorientation', facades.SpatialOrientation)
#: Brightness proxy to :class:`plyer.facades.Brightness`
brightness = Proxy('brightness', facades.Brightness)
#: StoragePath proxy to :class:`plyer.facades.StoragePath`
storagepath = Proxy('storagepath', facades.StoragePath)
#: Bluetooth proxy to :class:`plyer.facades.Bluetooth`
bluetooth = Proxy('bluetooth', facades.Bluetooth)
#: Processors proxy to :class:`plyer.facades.Processors`
processors = Proxy('processors', facades.Processors)
#: Processors proxy to :class:`plyer.facades.CPU`
cpu = Proxy('cpu', facades.CPU)
#: Screenshot proxy to :class:`plyer.facades.Screenshot`
screenshot = Proxy('screenshot', facades.Screenshot)
#: devicename proxy to :class:`plyer.facades.DeviceName`
devicename = Proxy('devicename', facades.DeviceName)
| mit | e54881dac1c216bfeaa7f4d39663431e | 31.854839 | 77 | 0.731959 | 2.850945 | false | false | false | false |
caktus/django-timepiece | timepiece/tests/test_management.py | 3 | 8043 | from dateutil.relativedelta import relativedelta
from django.utils import timezone
from django.test import TestCase
from timepiece import utils
from timepiece.management.commands import check_entries
from timepiece.entries.models import Entry
from . import factories
class CheckEntries(TestCase):
    """Tests for the ``check_entries`` management command."""

    def setUp(self):
        super(CheckEntries, self).setUp()
        self.user = factories.User()
        self.user2 = factories.User()
        self.superuser = factories.Superuser()
        self.project = factories.Project(
            type__enable_timetracking=True, status__enable_timetracking=True,
            point_person=self.user)
        # Template for entry creation; the helpers below take copies of it
        # so no call can leak state into the shared dict.
        self.default_data = {
            'user': self.user,
            'project': self.project,
            'seconds_paused': 0,
            'status': Entry.VERIFIED,
        }
        self.good_start = timezone.now() - relativedelta(days=0, hours=8)
        self.good_end = timezone.now() - relativedelta(days=0)
        self.bad_start = timezone.now() - relativedelta(days=1, hours=8)
        self.bad_end = timezone.now() - relativedelta(days=1)

        # Create users for the test
        self.user.first_name = 'first1'
        self.user.last_name = 'last1'
        self.user.save()
        self.user2.first_name = 'first2'
        self.user2.last_name = 'last2'
        self.user2.save()
        self.all_users = [self.user, self.user2, self.superuser]

        # Create a valid entry for all users on every day since 60 days ago
        self.make_entry_bulk(self.all_users, 60)

    # helper functions
    def make_entry(self, **kwargs):
        """
        Make a valid or invalid entry

        make_entry(**kwargs)
        **kwargs can include: start_time, end_time, valid

        Without any kwargs, make_entry makes a valid entry. (first time called)
        With valid=False, makes an invalid entry
        start_time and end_time can be specified.
        If start_time is used without end_time, end_time is 10 minutes later
        """
        valid = kwargs.get('valid', True)
        if valid:
            default_start = self.good_start
            default_end = self.good_end
        else:
            default_start = self.bad_start
            default_end = self.bad_end
        user = kwargs.get('user', self.user)
        start = kwargs.get('start_time', default_start)
        if 'end_time' in kwargs:
            end = kwargs.get('end_time', default_end)
        else:
            if 'start_time' in kwargs:
                end = start + relativedelta(minutes=10)
            else:
                end = default_end
        # Copy the template: the previous code aliased self.default_data and
        # mutated it in place, leaking user/start/end into later calls.
        data = dict(self.default_data)
        data.update({
            'user': user,
            'start_time': start,
            'end_time': end,
        })
        factories.Entry(**data)

    def make_entry_bulk(self, users, days, *args, **kwargs):
        """
        Create entries for users listed, from n days ago (but not today)

        make_entry_bulk(users_list, num_days)
        """
        # Test cases may create overlapping entries later
        for user in users:
            # Range uses 1 so that good_start/good_end use today as valid
            # times.
            for day in range(1, days + 1):
                # Copy instead of mutating the shared template dict.
                data = dict(self.default_data)
                data.update({
                    'user': user,
                    'start_time':
                        timezone.now() - relativedelta(days=day, minutes=1),
                    'end_time': timezone.now() - relativedelta(days=day),
                })
                factories.Entry(**data)

    # tests
    def testFindStart(self):
        """
        With various kwargs, find_start should return the correct date
        """
        # Establish some datetimes
        now = timezone.now()
        today = now - relativedelta(
            hour=0, minute=0, second=0, microsecond=0)
        last_billing = today - relativedelta(months=1, day=1)
        yesterday = today - relativedelta(days=1)
        ten_days_ago = today - relativedelta(days=10)
        thisweek = utils.get_week_start(today)
        thismonth = today - relativedelta(day=1)
        thisyear = today - relativedelta(month=1, day=1)

        # Use command flags to obtain datetimes
        start_default = check_entries.Command().find_start()
        start_yesterday = check_entries.Command().find_start(days=1)
        start_ten_days_ago = check_entries.Command().find_start(days=10)
        start_of_week = check_entries.Command().find_start(week=True)
        start_of_month = check_entries.Command().find_start(month=True)
        start_of_year = check_entries.Command().find_start(year=True)

        # assure the returned datetimes are correct
        self.assertEqual(start_default, last_billing)
        self.assertEqual(start_yesterday, yesterday)
        self.assertEqual(start_ten_days_ago, ten_days_ago)
        self.assertEqual(start_of_week, thisweek)
        self.assertEqual(start_of_month, thismonth)
        self.assertEqual(start_of_year, thisyear)

    def testFindUsers(self):
        """
        With args, find_users should search and return those user objects
        Without args, find_users should return all user objects
        """
        # Find one user by icontains first or last name, return all if no args
        users1 = check_entries.Command().find_users('firsT1')
        users2 = check_entries.Command().find_users('LasT2')
        all_users = check_entries.Command().find_users()

        # obtain instances from the querysets
        user1 = users1.get(pk=self.user.pk)
        user2 = users2.get(pk=self.user2.pk)
        all_1 = all_users.get(pk=self.user.pk)
        all_2 = all_users.get(pk=self.user2.pk)
        all_3 = all_users.get(pk=self.superuser.pk)

        self.assertEqual(users1.count(), 1)
        self.assertEqual(users2.count(), 1)
        self.assertEqual(all_users.count(), 3)
        self.assertEqual(user1, self.user)
        self.assertEqual(user2, self.user2)
        self.assertEqual(all_1, user1)
        self.assertEqual(all_2, user2)
        self.assertEqual(all_3, self.superuser)

    def testFindEntries(self):
        """
        Given a list of users and a starting point, entries should generate a
        list of all entries for each user from that time until now.
        """
        start = check_entries.Command().find_start()
        if start.day == 1:
            start += relativedelta(days=1)
        all_users = check_entries.Command().find_users()
        entries = check_entries.Command().find_entries(all_users, start)

        # Determine the number of days checked
        today = timezone.now() - \
            relativedelta(hour=0, minute=0, second=0, microsecond=0)
        diff = today - start
        days_checked = diff.days
        total_entries = 0
        while True:
            try:
                user_entries = next(entries)
                for entry in user_entries:
                    total_entries += 1
            except StopIteration:
                # Verify that every entry from the start point was returned
                expected_total = days_checked * len(self.all_users)
                self.assertEqual(total_entries, expected_total)
                return

    def testCheckEntry(self):
        """
        Given lists of entries from users, check_entry should return all
        overlapping entries.
        """
        start = check_entries.Command().find_start()
        all_users = check_entries.Command().find_users()
        entries = check_entries.Command().find_entries(all_users, start)
        total_overlaps = 0
        # make some bad entries
        num_days = 5
        self.make_entry_bulk(self.all_users, num_days)
        while True:
            try:
                user_entries = next(entries)
                user_overlaps = check_entries.Command().check_entry(
                    user_entries, verbosity=0)
                total_overlaps += user_overlaps
            except StopIteration:
                self.assertEqual(
                    total_overlaps, num_days * len(self.all_users))
                return
| mit | 14d3382d2444f9a670c6de0c09f603c9 | 39.621212 | 86 | 0.595549 | 4.049849 | false | true | false | false |
caktus/django-timepiece | timepiece/crm/migrations/0003_auto_20151119_0906.py | 1 | 1361 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adjusts field definitions on the
    # crm models (help texts, choices, EmailField max_length, FK options).

    dependencies = [
        ('crm', '0002_auto_20150115_1654'),
    ]

    operations = [
        migrations.AlterField(
            model_name='attribute',
            name='enable_timetracking',
            field=models.BooleanField(default=False, help_text='Enable time tracking functionality for projects with this type or status.'),
        ),
        migrations.AlterField(
            model_name='attribute',
            name='type',
            field=models.CharField(choices=[('project-type', 'Project Type'), ('project-status', 'Project Status')], max_length=32),
        ),
        migrations.AlterField(
            model_name='business',
            name='email',
            field=models.EmailField(blank=True, max_length=254),
        ),
        migrations.AlterField(
            model_name='project',
            name='activity_group',
            field=models.ForeignKey(verbose_name='restrict activities to', null=True, blank=True, to='entries.ActivityGroup', related_name='activity_group'),
        ),
        migrations.AlterField(
            model_name='project',
            name='tracker_url',
            field=models.CharField(default='', blank=True, max_length=255),
        ),
    ]
| mit | 67499592b46832c28249e1951c30ffaa | 33.897436 | 157 | 0.590742 | 4.536667 | false | false | false | false |
caktus/django-timepiece | timepiece/entries/forms.py | 1 | 7826 | import datetime
from dateutil.relativedelta import relativedelta
from django import forms
from django.db.models import Q
from selectable import forms as selectable
from timepiece import utils
from timepiece.crm.models import Project, ProjectRelationship
from timepiece.entries.models import Entry, Location, ProjectHours
from timepiece.entries.lookups import ActivityLookup
from timepiece.forms import (
INPUT_FORMATS, TimepieceSplitDateTimeField, TimepieceDateInput)
class ClockInForm(forms.ModelForm):
    """Clock the user in to a project, optionally closing the active entry.

    Expects two extra keyword arguments on construction:

    * ``user`` (required): the user who is clocking in.
    * ``active`` (optional): the user's currently-active ``Entry``; it is
      closed one second before the new entry's start time during clean().
    """
    active_comment = forms.CharField(
        label='Notes for the active entry', widget=forms.Textarea,
        required=False)
    start_time = TimepieceSplitDateTimeField(required=False)

    class Meta:
        model = Entry
        fields = ('active_comment', 'location', 'project', 'activity',
                  'start_time', 'comments')
        widgets = {
            'activity': selectable.AutoComboboxSelectWidget(lookup_class=ActivityLookup),
        }

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        self.active = kwargs.pop('active', None)

        # Write the dict back into kwargs so the parent __init__ actually
        # receives the defaults set below.  Previously the dict returned by
        # kwargs.get('initial', {}) was mutated but discarded whenever the
        # caller did not pass an ``initial`` kwarg (and the code crashed if
        # ``initial=None`` was passed).  Mirrors ClockOutForm's handling.
        initial = kwargs.get('initial', None) or {}
        kwargs['initial'] = initial

        # Pre-select the site-wide default location, if one is configured
        # and exists.
        default_loc = utils.get_setting('TIMEPIECE_DEFAULT_LOCATION_SLUG')
        if default_loc:
            try:
                loc = Location.objects.get(slug=default_loc)
            except Location.DoesNotExist:
                loc = None
            if loc:
                initial['location'] = loc.pk

        # Default the activity to whatever the user last used on this
        # project (None if they have no prior entries for it).
        project = initial.get('project', None)
        try:
            last_project_entry = Entry.objects.filter(
                user=self.user, project=project).order_by('-end_time')[0]
        except IndexError:
            initial['activity'] = None
        else:
            initial['activity'] = last_project_entry.activity.pk

        super(ClockInForm, self).__init__(*args, **kwargs)
        self.fields['start_time'].initial = datetime.datetime.now()
        # Users may only clock in to trackable projects they belong to.
        self.fields['project'].queryset = Project.trackable.filter(
            users=self.user)
        if not self.active:
            self.fields.pop('active_comment')
        else:
            self.fields['active_comment'].initial = self.active.comments
        self.instance.user = self.user

    def clean_start_time(self):
        """
        Make sure that the start time doesn't come before the active entry
        """
        start = self.cleaned_data.get('start_time')
        if not start:
            return start
        active_entries = self.user.timepiece_entries.filter(
            start_time__gte=start, end_time__isnull=True)
        for entry in active_entries:
            output = ('The start time is on or before the current entry: '
                      '%s - %s starting at %s' % (entry.project, entry.activity,
                                                  entry.start_time.strftime('%H:%M:%S')))
            raise forms.ValidationError(output)
        return start

    def clean(self):
        """Close the active entry (if any) just before the new start time."""
        start_time = self.clean_start_time()
        data = self.cleaned_data
        if not start_time:
            return data
        if self.active:
            self.active.unpause()
            self.active.comments = data['active_comment']
            # End the active entry one second before the new one begins so
            # the two entries never overlap.
            self.active.end_time = start_time - relativedelta(seconds=1)
            if not self.active.clean():
                raise forms.ValidationError(data)
        return data

    def save(self, commit=True):
        # hours is recomputed elsewhere; a fresh entry starts at 0.
        self.instance.hours = 0
        entry = super(ClockInForm, self).save(commit=commit)
        if self.active and commit:
            self.active.save()
        return entry
class ClockOutForm(forms.ModelForm):
    """Form for clocking out of the currently-active entry.

    Defaults the end time to "now" and unpauses the entry before saving so
    any accumulated pause time is folded into the entry.
    """
    start_time = TimepieceSplitDateTimeField()
    end_time = TimepieceSplitDateTimeField()

    class Meta:
        model = Entry
        fields = ('location', 'start_time', 'end_time', 'comments')

    def __init__(self, *args, **kwargs):
        # Ensure an initial dict exists (and is written back into kwargs)
        # so the default end time is actually used by the form.
        kwargs['initial'] = kwargs.get('initial', None) or {}
        kwargs['initial']['end_time'] = datetime.datetime.now()
        super(ClockOutForm, self).__init__(*args, **kwargs)

    def save(self, commit=True):
        # Unpause at the end time so paused duration is accounted for
        # before the entry is (optionally) persisted.
        entry = super(ClockOutForm, self).save(commit=False)
        entry.unpause(entry.end_time)
        if commit:
            entry.save()
        return entry
class AddUpdateEntryForm(forms.ModelForm):
    """Form for manually adding a new entry or editing an existing one.

    ``user`` is the owner of the entry; ``acting_user`` is whoever is making
    the change (superusers bypass the approved/invoiced timesheet lock).
    """
    start_time = TimepieceSplitDateTimeField()
    end_time = TimepieceSplitDateTimeField()

    class Meta:
        model = Entry
        exclude = ('user', 'pause_time', 'site', 'hours', 'status',
                   'entry_group')
        widgets = {
            'activity': selectable.AutoComboboxSelectWidget(lookup_class=ActivityLookup),
        }

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        self.acting_user = kwargs.pop('acting_user')
        super(AddUpdateEntryForm, self).__init__(*args, **kwargs)
        self.instance.user = self.user
        self.fields['project'].queryset = Project.trackable.filter(
            users=self.user)
        # If editing the active entry, remove the end_time field.
        if self.instance.start_time and not self.instance.end_time:
            self.fields.pop('end_time')

    def clean(self):
        """
        If we're not editing the active entry, ensure that this entry doesn't
        conflict with or come after the active entry.
        """
        active = utils.get_active_entry(self.user)
        start_time = self.cleaned_data.get('start_time', None)
        end_time = self.cleaned_data.get('end_time', None)

        if active and active.pk != self.instance.pk:
            if (start_time and start_time > active.start_time) or \
                    (end_time and end_time > active.start_time):
                raise forms.ValidationError(
                    'The start time or end time conflict with the active '
                    'entry: {activity} on {project} starting at '
                    '{start_time}.'.format(
                        project=active.project,
                        activity=active.activity,
                        start_time=active.start_time.strftime('%H:%M:%S'),
                    ))

        # Look for already approved/invoiced entries in the same month;
        # non-superusers may not add or edit entries in a locked timesheet.
        month_start = utils.get_month_start(start_time)
        next_month = month_start + relativedelta(months=1)
        entries = self.instance.user.timepiece_entries.filter(
            Q(status=Entry.APPROVED) | Q(status=Entry.INVOICED),
            start_time__gte=month_start,
            end_time__lt=next_month
        )
        entry = self.instance

        if not self.acting_user.is_superuser:
            # NOTE(review): due to Python operator precedence this condition
            # reads as (entries.exists() and not entry.id) or
            # (entry.id and entry.status == Entry.INVOICED) -- confirm that is
            # the intended rule before restructuring or adding parentheses.
            if (entries.exists() and not entry.id or entry.id and entry.status == Entry.INVOICED):
                message = 'You cannot add/edit entries after a timesheet has been ' \
                    'approved or invoiced. Please correct the start and end times.'
                raise forms.ValidationError(message)
        return self.cleaned_data
class ProjectHoursForm(forms.ModelForm):
    """Form for assigning projected hours on a project to a user."""

    class Meta:
        model = ProjectHours
        fields = ['week_start', 'project', 'user', 'hours', 'published']

    def save(self, commit=True):
        # BUG FIX: the previous implementation called super().save() without
        # arguments, silently ignoring ``commit`` and always hitting the
        # database. Honor the standard ModelForm contract instead.
        ph = super(ProjectHoursForm, self).save(commit=commit)
        # since hours are being assigned to a user, add the user
        # to the project if they are not already in it so they can track time
        ProjectRelationship.objects.get_or_create(
            user=self.cleaned_data['user'],
            project=self.cleaned_data['project'])
        return ph
class ProjectHoursSearchForm(forms.Form):
    """Search form for choosing which week of project hours to display."""
    week_start = forms.DateField(
        label='Week of', required=False,
        input_formats=INPUT_FORMATS, widget=TimepieceDateInput())

    def clean_week_start(self):
        """Snap the submitted date back to the start of its week."""
        week_start = self.cleaned_data.get('week_start', None)
        if not week_start:
            return None
        return utils.get_week_start(week_start, False)
| mit | c0ff9ab692d35f8202968bf07bd28322 | 36.990291 | 98 | 0.600946 | 4.221143 | false | false | false | false |
caktus/django-timepiece | timepiece/contracts/migrations/0003_auto_20151119_0906.py | 1 | 1396 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django schema migration: normalizes choices and
    # max_length options on contract-related fields. Django compares these
    # definitions against recorded model state, so edit only via makemigrations.

    dependencies = [
        ('contracts', '0002_auto_20150115_1654'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contracthour',
            name='status',
            field=models.IntegerField(default=1, choices=[(1, 'Pending'), (2, 'Approved')]),
        ),
        migrations.AlterField(
            model_name='entrygroup',
            name='number',
            field=models.CharField(verbose_name='Reference #', blank=True, null=True, max_length=50),
        ),
        migrations.AlterField(
            model_name='entrygroup',
            name='status',
            field=models.CharField(default='invoiced', choices=[('invoiced', 'Invoiced'), ('not-invoiced', 'Not Invoiced')], max_length=24),
        ),
        migrations.AlterField(
            model_name='projectcontract',
            name='status',
            field=models.CharField(default='upcoming', choices=[('upcoming', 'Upcoming'), ('current', 'Current'), ('complete', 'Complete')], max_length=32),
        ),
        migrations.AlterField(
            model_name='projectcontract',
            name='type',
            field=models.IntegerField(choices=[(1, 'Fixed'), (2, 'Pre-paid Hourly'), (3, 'Post-paid Hourly')]),
        ),
    ]
| mit | de703e7b71beffa828342d5b94dca63a | 34.794872 | 156 | 0.567335 | 4.217523 | false | false | false | false |
openelections/openelections-core | openelex/us/sc/datasource.py | 1 | 5065 | from future import standard_library
standard_library.install_aliases()
from os.path import join
import json
import datetime
import urllib.parse
import clarify
from openelex import PROJECT_ROOT
from openelex.base.datasource import BaseDatasource
from openelex.lib import build_raw_github_url
class Datasource(BaseDatasource):
    """Maps South Carolina raw results files to standardized filenames.

    County- and precinct-level results are pulled from the state's Clarity
    election-night reporting system (XML reports); special elections come
    from entries in the url_paths CSV.
    """

    # PUBLIC INTERFACE

    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        return [(mapping['generated_filename'], self._url_for_fetch(mapping))
                for mapping in self.mappings(year)]

    def mappings_for_url(self, url):
        """Return all mappings whose raw source url matches ``url``."""
        return [mapping for mapping in self.mappings() if mapping['raw_url'] == url]

    # PRIVATE METHODS

    def _build_metadata(self, year, elections):
        """Build one mapping dict per results file for the given elections.

        Special elections yield a single statewide file. Regular elections
        with a Clarity summary page yield one county-level file plus one
        precinct-level file per county.
        """
        meta = []
        for election in elections:
            if election['special'] == True:
                results = [x for x in self._url_paths() if x['date'] == election['start_date'] and x['special'] == True]
                for result in results:
                    ocd_id = 'ocd-division/country:us/state:sc'
                    # NOTE(review): _generates_special_filename is not defined
                    # on this class -- presumably provided by BaseDatasource
                    # or missing; verify before relying on this branch.
                    generated_filename = self._generates_special_filename(result)
                    meta.append({
                        "generated_filename": generated_filename,
                        "raw_url": election['direct_links'][0],
                        "pre_processed_url": None,
                        "ocd_id": ocd_id,
                        "name": 'South Carolina',
                        "election": election['slug']
                    })
            elif 'summary.html' in election['direct_links'][0]:
                # county-level results from the Clarity state-level report
                j = clarify.Jurisdiction(url=election['direct_links'][0], level='state')
                ocd_id = 'ocd-division/country:us/state:sc'
                generated_filename = self._generate_county_filename(election)
                meta.append({
                    "generated_filename": generated_filename,
                    "pre_processed_url": None,
                    "raw_url": j.report_url('xml'),
                    "ocd_id": ocd_id,
                    "name": 'South Carolina',
                    "election": election['slug']
                })
                # precinct-level results, one file per county
                subs = j.get_subjurisdictions()
                for county in self._jurisdictions():
                    try:
                        subj = [s for s in subs if s.name.strip() == county['name'].strip()][0]
                        generated_filename = self._generate_precinct_filename(election, county)
                        meta.append({
                            "generated_filename": generated_filename,
                            "pre_processed_url": None,
                            "raw_url": subj.report_url('xml'),
                            "ocd_id": county['ocd_id'],
                            "name": county['name'],
                            "election": election['slug']
                        })
                    except IndexError:
                        # No matching Clarity subjurisdiction for this county.
                        continue
        return meta

    def _generate_county_filename(self, election):
        """Standardized filename for a statewide county-level XML file."""
        bits = [
            election['start_date'].replace('-', ''),
            self.state,
        ]
        if election['special']:
            bits.append('special')
        bits.extend([
            election['race_type'].replace('-', '_').lower()
        ])
        filename = "__".join(bits) + '.xml'
        return filename

    def _generate_precinct_filename(self, election, county):
        """Standardized filename for a single county's precinct-level XML file."""
        bits = [
            election['start_date'].replace('-', ''),
            self.state
        ]
        if election['special']:
            bits.append('special')
        bits.extend([
            election['race_type'].replace('-', '_').lower(),
            county['name'].lower(),
            'precinct'
        ])
        filename = "__".join(bits) + '.xml'
        return filename

    def _jurisdictions(self):
        """South Carolina counties"""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['name'] != ""]
        return mappings

    def _url_for_fetch(self, mapping):
        # BUG FIX: this method was defined twice; the earlier definition
        # (which referenced a nonexistent 'url' key and returned None) was
        # shadowed by this one and has been removed.
        if mapping['pre_processed_url']:
            return mapping['pre_processed_url']
        else:
            return mapping['raw_url']
| mit | eed722aedd382d9bbe3d4d7d7f4f1951 | 37.371212 | 120 | 0.517473 | 4.355116 | false | false | false | false |
openelections/openelections-core | openelex/us/pa/datasource.py | 1 | 3643 | """
Standardize names of data files from the Pennsylvania Secretary of State.
The state offers CSV files containing precinct-level results for regularly scheduled primary and general elections; these were split from a single
zip file into election-specific files and thus have no `raw_url` attribute. Special elections are pre-processed CSV files from HTML files. All files
are available in the https://github.com/openelections/openelections-data-pa repository.
"""
from future import standard_library
standard_library.install_aliases()
from os.path import join
import json
import unicodecsv
import urllib.parse
import requests
from openelex import PROJECT_ROOT
from openelex.base.datasource import BaseDatasource
from openelex.lib import build_github_url
class Datasource(BaseDatasource):
    """Maps Pennsylvania raw results files to standardized filenames.

    Regular elections use CSVs split from the SoS zip file (no raw_url);
    special elections use pre-processed CSVs on GitHub.
    """

    # PUBLIC INTERFACE

    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        return [(item['generated_filename'], self._url_for_fetch(item))
                for item in self.mappings(year)]

    # PRIVATE METHODS

    def _build_metadata(self, year, elections):
        """Build one mapping dict per url_paths row matching each election.

        ``raw_url`` may be None when the election has no direct link
        (results were split from a single statewide zip file).
        """
        meta = []
        year_int = int(year)
        for election in elections:
            # Match url_paths rows by date and special-election flag.
            if election['special']:
                results = [x for x in self._url_paths() if x['date'] == election['start_date'] and x['special'] == True]
            else:
                results = [x for x in self._url_paths() if x['date'] == election['start_date'] and x['special'] == False]
            for result in results:
                if election['direct_links']:
                    raw_url = election['direct_links'][0]
                else:
                    raw_url = None
                generated_filename = self._generate_filename(election['start_date'], election['race_type'], result)
                meta.append({
                    "generated_filename": generated_filename,
                    "raw_url": raw_url,
                    "pre_processed_url": build_github_url(self.state, generated_filename),
                    "ocd_id": 'ocd-division/country:us/state:pa',
                    "name": 'Pennsylvania',
                    "election": election['slug']
                })
        return meta

    def _generate_filename(self, start_date, election_type, result):
        """Build the standardized filename for one result row.

        Filename segments (joined by '__') are: date, state, election type
        (prefixed 'special__' or suffixed '__precinct'), then office and
        optional district. These names are an external contract -- they must
        match the files in the openelections-data-pa repository.
        """
        if result['district'] == '':
            office = result['office']
        else:
            office = result['office'] + '__' + result['district']
        if result['special']:
            election_type = 'special__' + election_type
        else:
            election_type = election_type+'__precinct'
        bits = [
            start_date.replace('-',''),
            self.state.lower(),
            election_type,
            office
        ]
        if office == '':
            bits.remove(office)
        name = "__".join(bits) + '.csv'
        return name

    def _jurisdictions(self):
        """Pennsylvania counties"""
        return self.jurisdiction_mappings()

    def _url_for_fetch(self, mapping):
        # Prefer the pre-processed GitHub CSV over the raw source url.
        if mapping['pre_processed_url']:
            return mapping['pre_processed_url']
        else:
            return mapping['raw_url']
| mit | 9de31558306c8140b51a3932b26e0e97 | 37.347368 | 148 | 0.595388 | 4.135074 | false | false | false | false |
openelections/openelections-core | openelex/us/la/datasource.py | 1 | 3897 | """
Louisiana has pre-processed parish-level (county-equivalent) CSV files available on Github at
https://github.com/openelections/openelections-data-la.
"""
from future import standard_library
standard_library.install_aliases()
from builtins import str
from os.path import join
import json
import datetime
import urllib.parse
from openelex import PROJECT_ROOT
from openelex.base.datasource import BaseDatasource
from openelex.lib import build_raw_github_url
class Datasource(BaseDatasource):
    """Maps Louisiana raw results files to standardized filenames.

    Each election yields two pre-processed CSVs hosted on GitHub: one
    parish-level file and one precinct-level file.
    """

    # PUBLIC INTERFACE

    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        return [(mapping['generated_filename'], self._url_for_fetch(mapping))
                for mapping in self.mappings(year)]

    def mappings_for_url(self, url):
        # BUG FIX: the mapping dicts built by _build_metadata have no 'url'
        # key, so filtering on mapping['url'] raised KeyError on every call.
        # Filter on 'raw_url' instead (consistent with target_urls and with
        # the other state datasources).
        return [mapping for mapping in self.mappings() if mapping['raw_url'] == url]

    # PRIVATE METHODS

    def _build_metadata(self, year, elections):
        """Build parish-level and precinct-level mapping dicts per election."""
        meta = []
        for election in elections:
            # parish-level file
            jurisdiction = 'ocd-division/country:us/state:la'
            generated_filename = self._generate_parish_filename(election)
            meta.append({
                "generated_filename": generated_filename,
                "raw_url": election['portal_link'],
                "pre_processed_url": build_raw_github_url(self.state, str(year), generated_filename),
                "ocd_id": jurisdiction,
                "name": 'Louisiana',
                "election": election['slug']
            })
            # precinct-level file
            generated_filename = self._generate_precinct_filename(election)
            meta.append({
                "generated_filename": generated_filename,
                "raw_url": election['portal_link'],
                "pre_processed_url": build_raw_github_url(self.state, str(year), generated_filename),
                "ocd_id": jurisdiction,
                "name": 'Louisiana',
                "election": election['slug']
            })
        return meta

    def _generate_parish_filename(self, election):
        """Standardized filename for the parish-level CSV."""
        bits = [
            election['start_date'].replace('-', ''),
            self.state,
        ]
        if election['special']:
            bits.append('special')
        bits.extend([
            election['race_type'].replace('-', '_').lower()
        ])
        filename = "__".join(bits) + '.csv'
        return filename

    def _generate_precinct_filename(self, election):
        """Standardized filename for the precinct-level CSV."""
        bits = [
            election['start_date'].replace('-', ''),
            self.state,
        ]
        if election['special']:
            bits.append('special')
        bits.extend([
            election['race_type'].replace('-', '_').lower(),
            'precinct'
        ])
        filename = "__".join(bits) + '.csv'
        return filename

    def _jurisdictions(self):
        """Louisiana parishes"""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['name'] != ""]
        return mappings

    def _url_for_fetch(self, mapping):
        # BUG FIX: this method was defined twice; the earlier definition
        # (which referenced a nonexistent 'url' key) was shadowed by this
        # one and has been removed.
        if mapping['pre_processed_url']:
            return mapping['pre_processed_url']
        else:
            return mapping['raw_url']
| mit | 70cf568dfc5ae2475b790404f0e35020 | 33.184211 | 101 | 0.57788 | 4.07636 | false | false | false | false |
openelections/openelections-core | openelex/us/vt/datasource.py | 1 | 8965 | """
In VT, we have to search for elections on the vermont secretary of state website.
We will be given some election id, we can then query election results with:
http://vtelectionarchive.sec.state.vt.us/elections/download/%election-id%/precincts_include:0/
or
http://vtelectionarchive.sec.state.vt.us/elections/download/%election-id%/precincts_include:1/
To run mappings from a shell:
openelex datasource.mappings -s vt
"""
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
import re
import urllib.request, urllib.error, urllib.parse
from bs4 import BeautifulSoup
from string import Template
from multiprocessing.pool import ThreadPool
from datetime import datetime
from openelex.lib import format_date, standardized_filename
from openelex.base.datasource import BaseDatasource
class Datasource(BaseDatasource):
    """Builds raw-result mappings for Vermont by scraping the Secretary of
    State's election archive.

    Election ids found on the archive's search pages are turned into CSV
    download urls of the form
    ``.../elections/download/<id>/precincts_include:<0|1>/``.
    """

    # PUBLIC INTERFACE

    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        # BUG FIX: the docstring used to sit *after* the print() call below,
        # which made it a no-op string expression instead of a docstring.
        print((str(datetime.now()), "mappings begin"))  # debug timing output
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        print((str(datetime.now()), "mappings end"))  # debug timing output
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        return [(item['generated_filename'], item['raw_url'])
                for item in self.mappings(year)]

    # PRIVATE METHODS

    def _races_by_type(self, elections):
        "Filter races by type and add election slug"
        races = {
            'special': None,
        }
        for elec in elections:
            rtype = self._race_type(elec)
            elec['slug'] = self._election_slug(elec)
            races[rtype] = elec
        # BUG FIX: use .get() so a year that has no general or primary race
        # returns None instead of raising KeyError.
        return races.get('general'), races.get('primary'), races.get('special')

    def _race_type(self, election):
        if election['special']:
            return 'special'
        return election['race_type'].lower()

    def _jurisdictions(self):
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['url_name'] != ""]
        return mappings

    def _jurisdictionOcdMap(self, pattern):
        """Return jurisdiction mappings whose OCD id contains ``pattern``."""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if pattern in x['ocd_id']]
        return mappings

    def _jurisdictionOcdMapForStateSenate(self):
        return self._jurisdictionOcdMap("sldu")

    def _jurisdictionOcdMapForStateRep(self):
        return self._jurisdictionOcdMap("sldl")

    def _jurisdictionOcdMapForStateWide(self):
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['fips'] == "50"]
        return mappings

    def _build_metadata(self, year, elections):
        meta = []
        meta += self._state_leg_meta(year, elections, self.StateWideOfficeId, self._jurisdictionOcdMapForStateWide())
        meta += self._state_leg_meta(year, elections, [self.STATE_SENATE_ELEC_OFFICE_ID], self._jurisdictionOcdMapForStateSenate())
        meta += self._state_leg_meta(year, elections, [self.STATE_REP_ELEC_OFFICE_ID], self._jurisdictionOcdMapForStateRep())
        return meta

    # Office ids as used by the archive's search form.
    # US races
    PRESIDENT_ELEC_OFFICE_ID = 1
    US_SENATE_ELEC_OFFICE_ID = 6
    US_HOUSE_ELEC_OFFICE_ID = 5
    # Statewide state races
    STATE_GOV_ELEC_OFFICE_ID = 3
    STATE_LT_GOV_ELEC_OFFICE_ID = 4
    STATE_TREASURER_ELEC_OFFICE_ID = 53
    STATE_SEC_OF_STATE_ELEC_OFFICE_ID = 44
    STATE_AUDITOR_ELEC_OFFICE_ID = 13
    STATE_ATTORNEY_GEN_ELEC_OFFICE_ID = 12
    # State offices per district
    STATE_SENATE_ELEC_OFFICE_ID = 9
    STATE_REP_ELEC_OFFICE_ID = 8
    # TODO: County officials.

    StateWideOfficeId = [
        PRESIDENT_ELEC_OFFICE_ID,
        US_SENATE_ELEC_OFFICE_ID,
        US_HOUSE_ELEC_OFFICE_ID,
        STATE_GOV_ELEC_OFFICE_ID,
        STATE_LT_GOV_ELEC_OFFICE_ID,
        STATE_TREASURER_ELEC_OFFICE_ID,
        STATE_SEC_OF_STATE_ELEC_OFFICE_ID,
        STATE_AUDITOR_ELEC_OFFICE_ID,
        STATE_ATTORNEY_GEN_ELEC_OFFICE_ID,
    ]

    OfficeIdToOfficeNameMap = {
        PRESIDENT_ELEC_OFFICE_ID: "president",
        US_SENATE_ELEC_OFFICE_ID: "senate",
        US_HOUSE_ELEC_OFFICE_ID: "house",
        STATE_GOV_ELEC_OFFICE_ID: "governor",
        STATE_LT_GOV_ELEC_OFFICE_ID: "lieutenant_governor",
        STATE_TREASURER_ELEC_OFFICE_ID: "treasurer",
        STATE_SEC_OF_STATE_ELEC_OFFICE_ID: "secretary_of_state",
        STATE_AUDITOR_ELEC_OFFICE_ID: "auditor",
        STATE_ATTORNEY_GEN_ELEC_OFFICE_ID: "attorney_general",
        STATE_SENATE_ELEC_OFFICE_ID: "state_senate",
        STATE_REP_ELEC_OFFICE_ID: "state_house"
    }

    search_url_expr = "http://vtelectionarchive.sec.state.vt.us/elections/search/year_from:$year/year_to:$year/office_id:$office_id"
    electionViewUrl = "http://vtelectionarchive.sec.state.vt.us/elections/download/$electionId/precincts_include:$isPrecinct/"

    def _getElectionViewUrl(self, elecId, isPrecinct):
        """Build the CSV download url for an election id."""
        return Template(self.electionViewUrl).substitute(electionId=elecId, isPrecinct=(1 if isPrecinct else 0))

    def _getElectionList(self, year, officeId):
        """Scrape the archive search results for ``year``/``officeId``.

        Returns ``(officeId, rows)`` where each row carries the election id
        plus the year/office/district/stage text from the results table.
        """
        payload = []
        search_url = Template(self.search_url_expr).substitute(year=year, office_id=officeId)
        response = urllib.request.urlopen(search_url)
        html_doc = response.read()
        # Rows of interest have ids like "election-id-<n>".
        soup = BeautifulSoup(html_doc, 'html.parser')
        resultTableRows = soup.find_all(id=re.compile("election-id-*"))
        for resRow in resultTableRows:
            elemId = resRow.get('id')
            elemElectId = elemId.split('-')[-1]
            resCols = resRow.find_all('td')
            year, office, district, stage = resCols[0].text, resCols[1].text, resCols[2].text, resCols[3].text
            payload.append({'officeId': officeId, 'id': elemElectId, 'year': year, 'office': office, 'district': district, 'stage': stage})
        return officeId, payload

    def _findElecByType(self, elecDict, stage):
        # Renamed the second parameter from ``str`` to avoid shadowing the
        # builtin (internal helper; no external callers use the keyword).
        return [e for e in elecDict if e['stage'] == stage]

    def _officeNameFromId(self, officeId):
        return self.OfficeIdToOfficeNameMap[officeId]

    def _state_leg_meta(self, year, elections, officeIds, jurisdictions):
        """Build mapping dicts for every election/office/precinct combination."""
        year_int = int(year)
        payload = []
        general, primary, special = self._races_by_type(elections)
        # Fetch search pages for all offices concurrently (network-bound).
        electByPosition = ThreadPool(20).imap_unordered(lambda x: self._getElectionList(year_int, x), officeIds)
        for positionId, elecDict in electByPosition:
            for precinct_val in (True, False):
                if general:
                    genElections = self._findElecByType(elecDict, "General Election")
                    for genElect in genElections:
                        payload.append(self._generatedOneStateLegElectionMetaData(genElect, general, None, jurisdictions, precinct_val))
                if primary:
                    for party in ['Democratic', 'Republican', 'Progressive', 'Liberty Union']:
                        electionType = party + " Primary"
                        primElections = self._findElecByType(elecDict, electionType)
                        for primElect in primElections:
                            payload.append(self._generatedOneStateLegElectionMetaData(primElect, primary, party, jurisdictions, precinct_val))
        return payload

    def _generatedOneStateLegElectionMetaData(self, elecVt, election, primaryParty, jurisdictions, isPrecinct):
        """Build one mapping dict for a single scraped election row.

        ``primaryParty`` is None for general elections. ``jurisdictions`` is
        accepted for interface compatibility but is not used here.
        """
        raceType = "primary" if primaryParty else "general"
        office = self._officeNameFromId(elecVt['officeId'])
        jurisdiction = elecVt['district']
        if jurisdiction == "Statewide":
            jurisdiction = "Vermont"
        generatedFileName = standardized_filename('vt', election['start_date'], '.csv',
                                                  party=primaryParty,
                                                  race_type=raceType,
                                                  reporting_level="precinct" if isPrecinct else None,
                                                  jurisdiction=jurisdiction,
                                                  office=office)
        meta = {
            'name': jurisdiction,
            'raw_url': self._getElectionViewUrl(elecVt['id'], isPrecinct),
            'generated_filename': generatedFileName,
            'election': election['slug'],
            # specific for VT
            'isPrecinct': isPrecinct,
            'office': office,
            # BUG FIX: this flag was computed as (raceType == "general"),
            # i.e. inverted; a primary race must report isPrimary=True.
            'isPrimary': primaryParty is not None,
            'primaryParty': primaryParty,
            'officeDistrict': jurisdiction,
        }
        return meta
| mit | d9a98bacffb2bb333de812e906f3b396 | 40.313364 | 142 | 0.631456 | 3.351402 | false | false | false | false |
viblo/pymunk | pymunk/shapes.py | 1 | 25479 | __docformat__ = "reStructuredText"
import logging
from typing import TYPE_CHECKING, List, Optional, Sequence, Tuple
if TYPE_CHECKING:
from .body import Body
from .space import Space
from ._chipmunk_cffi import ffi
from ._chipmunk_cffi import lib as cp
from ._pickle import PickleMixin, _State
from ._typing_attr import TypingAttrMixing
from .bb import BB
from .contact_point_set import ContactPointSet
from .query_info import PointQueryInfo, SegmentQueryInfo
from .shape_filter import ShapeFilter
from .transform import Transform
from .vec2d import Vec2d
_logger = logging.getLogger(__name__)
class Shape(PickleMixin, TypingAttrMixing, object):
    """Base class for all the shapes.
    You usually don't want to create instances of this class directly but use
    one of the specialized shapes instead (:py:class:`Circle`,
    :py:class:`Poly` or :py:class:`Segment`).
    All the shapes can be copied and pickled. If you copy/pickle a shape the
    body (if any) will also be copied.
    """
    _pickle_attrs_init = PickleMixin._pickle_attrs_init + ["body"]
    _pickle_attrs_general = PickleMixin._pickle_attrs_general + [
        "sensor",
        "collision_type",
        "filter",
        "elasticity",
        "friction",
        "surface_velocity",
    ]
    # mass/density are pickled conditionally in __getstate__ (only when > 0).
    _pickle_attrs_skip = PickleMixin._pickle_attrs_skip + ["mass", "density"]
    _space = None  # Weak ref to the space holding this body (if any)
    # Class-wide counter used by _set_id to hand out unique shape ids.
    _id_counter = 1
    def __init__(self, shape: "Shape") -> None:
        self._shape = shape
        self._body: Optional["Body"] = shape.body
    def _init(self, body: Optional["Body"], _shape: ffi.CData) -> None:
        # Attach the low-level Chipmunk shape and register a finalizer.
        self._body = body
        if body is not None:
            body._shapes.add(self)
        def shapefree(cp_shape):  # type: ignore
            # Finalizer run by ffi.gc: detach the shape from its space and
            # body before freeing the underlying C struct.
            _logger.debug("shapefree start %s", cp_shape)
            cp_space = cp.cpShapeGetSpace(cp_shape)
            if cp_space != ffi.NULL:
                _logger.debug("shapefree remove from space %s %s", cp_space, cp_shape)
                cp.cpSpaceRemoveShape(cp_space, cp_shape)
            _logger.debug("shapefree get body %s", cp_shape)
            cp_body = cp.cpShapeGetBody(cp_shape)
            if cp_body != ffi.NULL:
                _logger.debug("shapefree set body %s", cp_shape)
                cp.cpShapeSetBody(cp_shape, ffi.NULL)
            _logger.debug("shapefree free %s", cp_shape)
            cp.cpShapeFree(cp_shape)
        self._shape = ffi.gc(_shape, shapefree)
        self._set_id()
    @property
    def _id(self) -> int:
        """Unique id of the Shape
        .. note::
            Experimental API. Likely to change in future major, minor or point
            releases.
        """
        return int(ffi.cast("int", cp.cpShapeGetUserData(self._shape)))
    def _set_id(self) -> None:
        # Store a process-unique id in the C shape's user data pointer.
        cp.cpShapeSetUserData(self._shape, ffi.cast("cpDataPointer", Shape._id_counter))
        Shape._id_counter += 1
    def _get_mass(self) -> float:
        return cp.cpShapeGetMass(self._shape)
    def _set_mass(self, mass: float) -> None:
        cp.cpShapeSetMass(self._shape, mass)
    mass = property(
        _get_mass,
        _set_mass,
        doc="""The mass of this shape.
        This is useful when you let Pymunk calculate the total mass and inertia
        of a body from the shapes attached to it. (Instead of setting the body
        mass and inertia directly)
        """,
    )
    def _get_density(self) -> float:
        return cp.cpShapeGetDensity(self._shape)
    def _set_density(self, density: float) -> None:
        cp.cpShapeSetDensity(self._shape, density)
    density = property(
        _get_density,
        _set_density,
        doc="""The density of this shape.
        This is useful when you let Pymunk calculate the total mass and inertia
        of a body from the shapes attached to it. (Instead of setting the body
        mass and inertia directly)
        """,
    )
    @property
    def moment(self) -> float:
        """The calculated moment of this shape."""
        return cp.cpShapeGetMoment(self._shape)
    @property
    def area(self) -> float:
        """The calculated area of this shape."""
        return cp.cpShapeGetArea(self._shape)
    @property
    def center_of_gravity(self) -> Vec2d:
        """The calculated center of gravity of this shape."""
        v = cp.cpShapeGetCenterOfGravity(self._shape)
        return Vec2d(v.x, v.y)
    def _get_sensor(self) -> bool:
        return bool(cp.cpShapeGetSensor(self._shape))
    def _set_sensor(self, is_sensor: bool) -> None:
        cp.cpShapeSetSensor(self._shape, is_sensor)
    sensor = property(
        _get_sensor,
        _set_sensor,
        doc="""A boolean value if this shape is a sensor or not.
        Sensors only call collision callbacks, and never generate real
        collisions.
        """,
    )
    def _get_collision_type(self) -> int:
        return cp.cpShapeGetCollisionType(self._shape)
    def _set_collision_type(self, t: int) -> None:
        cp.cpShapeSetCollisionType(self._shape, t)
    collision_type = property(
        _get_collision_type,
        _set_collision_type,
        doc="""User defined collision type for the shape.
        See :py:meth:`Space.add_collision_handler` function for more
        information on when to use this property.
        """,
    )
    def _get_filter(self) -> ShapeFilter:
        f = cp.cpShapeGetFilter(self._shape)
        return ShapeFilter(f.group, f.categories, f.mask)
    def _set_filter(self, f: ShapeFilter) -> None:
        cp.cpShapeSetFilter(self._shape, f)
    filter = property(
        _get_filter,
        _set_filter,
        doc="""Set the collision :py:class:`ShapeFilter` for this shape.
        """,
    )
    def _get_elasticity(self) -> float:
        return cp.cpShapeGetElasticity(self._shape)
    def _set_elasticity(self, e: float) -> None:
        cp.cpShapeSetElasticity(self._shape, e)
    elasticity = property(
        _get_elasticity,
        _set_elasticity,
        doc="""Elasticity of the shape.
        A value of 0.0 gives no bounce, while a value of 1.0 will give a
        'perfect' bounce. However due to inaccuracies in the simulation
        using 1.0 or greater is not recommended.
        """,
    )
    def _get_friction(self) -> float:
        return cp.cpShapeGetFriction(self._shape)
    def _set_friction(self, u: float) -> None:
        cp.cpShapeSetFriction(self._shape, u)
    friction = property(
        _get_friction,
        _set_friction,
        doc="""Friction coefficient.
        Pymunk uses the Coulomb friction model, a value of 0.0 is
        frictionless.
        A value over 1.0 is perfectly fine.
        Some real world example values from Wikipedia (Remember that
        it is what looks good that is important, not the exact value).
        ==============  ======  ========
        Material        Other   Friction
        ==============  ======  ========
        Aluminium       Steel   0.61
        Copper          Steel   0.53
        Brass           Steel   0.51
        Cast iron       Copper  1.05
        Cast iron       Zinc    0.85
        Concrete (wet)  Rubber  0.30
        Concrete (dry)  Rubber  1.0
        Concrete        Wood    0.62
        Copper          Glass   0.68
        Glass           Glass   0.94
        Metal           Wood    0.5
        Polyethene      Steel   0.2
        Steel           Steel   0.80
        Steel           Teflon  0.04
        Teflon (PTFE)   Teflon  0.04
        Wood            Wood    0.4
        ==============  ======  ========
        """,
    )
    def _get_surface_velocity(self) -> Vec2d:
        v = cp.cpShapeGetSurfaceVelocity(self._shape)
        return Vec2d(v.x, v.y)
    def _set_surface_velocity(self, surface_v: Vec2d) -> None:
        assert len(surface_v) == 2
        cp.cpShapeSetSurfaceVelocity(self._shape, surface_v)
    surface_velocity = property(
        _get_surface_velocity,
        _set_surface_velocity,
        doc="""The surface velocity of the object.
        Useful for creating conveyor belts or players that move around. This
        value is only used when calculating friction, not resolving the
        collision.
        """,
    )
    def _get_body(self) -> Optional["Body"]:
        return self._body
    def _set_body(self, body: Optional["Body"]) -> None:
        # Keep the Python-side body<->shape bookkeeping in sync with the
        # underlying C shape.
        if self._body is not None:
            self._body._shapes.remove(self)
        body_body = ffi.NULL if body is None else body._body
        cp.cpShapeSetBody(self._shape, body_body)
        if body is not None:
            body._shapes.add(self)
        self._body = body
    body = property(
        _get_body,
        _set_body,
        doc="""The body this shape is attached to. Can be set to None to
        indicate that this shape doesnt belong to a body.""",
    )
    def update(self, transform: Transform) -> BB:
        """Update, cache and return the bounding box of a shape with an
        explicit transformation.
        Useful if you have a shape without a body and want to use it for
        querying.
        """
        _bb = cp.cpShapeUpdate(self._shape, transform)
        return BB(_bb.l, _bb.b, _bb.r, _bb.t)
    def cache_bb(self) -> BB:
        """Update and returns the bounding box of this shape"""
        _bb = cp.cpShapeCacheBB(self._shape)
        return BB(_bb.l, _bb.b, _bb.r, _bb.t)
    @property
    def bb(self) -> BB:
        """The bounding box :py:class:`BB` of the shape.
        Only guaranteed to be valid after :py:meth:`Shape.cache_bb` or
        :py:meth:`Space.step` is called. Moving a body that a shape is
        connected to does not update it's bounding box. For shapes used for
        queries that aren't attached to bodies, you can also use
        :py:meth:`Shape.update`.
        """
        _bb = cp.cpShapeGetBB(self._shape)
        return BB(_bb.l, _bb.b, _bb.r, _bb.t)
    def point_query(self, p: Tuple[float, float]) -> PointQueryInfo:
        """Check if the given point lies within the shape.
        A negative distance means the point is within the shape.
        :return: Tuple of (distance, info)
        :rtype: (float, :py:class:`PointQueryInfo`)
        """
        assert len(p) == 2
        info = ffi.new("cpPointQueryInfo *")
        _ = cp.cpShapePointQuery(self._shape, p, info)
        # Sanity check: the shape reported back by Chipmunk must be this one.
        ud = int(ffi.cast("int", cp.cpShapeGetUserData(info.shape)))
        assert ud == self._id
        return PointQueryInfo(
            self,
            Vec2d(info.point.x, info.point.y),
            info.distance,
            Vec2d(info.gradient.x, info.gradient.y),
        )
    def segment_query(
        self, start: Tuple[float, float], end: Tuple[float, float], radius: float = 0
    ) -> SegmentQueryInfo:
        """Check if the line segment from start to end intersects the shape.
        :rtype: :py:class:`SegmentQueryInfo`
        """
        assert len(start) == 2
        assert len(end) == 2
        info = ffi.new("cpSegmentQueryInfo *")
        r = cp.cpShapeSegmentQuery(self._shape, start, end, radius, info)
        if r:
            # Hit: verify the reported shape is this one, then include self.
            ud = int(ffi.cast("int", cp.cpShapeGetUserData(info.shape)))
            assert ud == self._id
            return SegmentQueryInfo(
                self,
                Vec2d(info.point.x, info.point.y),
                Vec2d(info.normal.x, info.normal.y),
                info.alpha,
            )
        else:
            # Miss: shape is None but the raw query data is still returned.
            return SegmentQueryInfo(
                None,
                Vec2d(info.point.x, info.point.y),
                Vec2d(info.normal.x, info.normal.y),
                info.alpha,
            )
    def shapes_collide(self, b: "Shape") -> ContactPointSet:
        """Get contact information about this shape and shape b.
        :rtype: :py:class:`ContactPointSet`
        """
        _points = cp.cpShapesCollide(self._shape, b._shape)
        return ContactPointSet._from_cp(_points)
    @property
    def space(self) -> Optional["Space"]:
        """Get the :py:class:`Space` that shape has been added to (or
        None).
        """
        if self._space is not None:
            try:
                return self._space._get_self()  # ugly hack because of weakref
            except ReferenceError:
                # The space has been garbage collected.
                return None
        else:
            return None
    def __getstate__(self) -> _State:
        """Return the state of this object
        This method allows the usage of the :mod:`copy` and :mod:`pickle`
        modules with this class.
        """
        d = super(Shape, self).__getstate__()
        # Only pickle mass/density if they were explicitly set (> 0).
        if self.mass > 0:
            d["general"].append(("mass", self.mass))
        if self.density > 0:
            d["general"].append(("density", self.density))
        return d
class Circle(Shape):
    """A circle shape defined by a radius

    This is the fastest and simplest collision shape
    """

    _pickle_attrs_init = Shape._pickle_attrs_init + ["radius", "offset"]

    def __init__(
        self,
        body: Optional["Body"],
        radius: float,
        offset: Tuple[float, float] = (0, 0),
    ) -> None:
        """body is the body attach the circle to, offset is the offset from the
        body's center of gravity in body local coordinates.

        It is legal to send in None as body argument to indicate that this
        shape is not attached to a body. However, you must attach it to a body
        before adding the shape to a space or used for a space shape query.
        """
        assert len(offset) == 2
        chipmunk_body = ffi.NULL if body is None else body._body
        self._init(body, cp.cpCircleShapeNew(chipmunk_body, radius, offset))

    def unsafe_set_radius(self, r: float) -> None:
        """Unsafe set the radius of the circle.

        .. note::
            This change is only picked up as a change to the position
            of the shape's surface, but not it's velocity. Changing it will
            not result in realistic physical behavior. Only use if you know
            what you are doing!
        """
        cp.cpCircleShapeSetRadius(self._shape, r)

    @property
    def radius(self) -> float:
        """The Radius of the circle"""
        return cp.cpCircleShapeGetRadius(self._shape)

    def unsafe_set_offset(self, o: Tuple[float, float]) -> None:
        """Unsafe set the offset of the circle.

        .. note::
            This change is only picked up as a change to the position
            of the shape's surface, but not it's velocity. Changing it will
            not result in realistic physical behavior. Only use if you know
            what you are doing!
        """
        assert len(o) == 2
        cp.cpCircleShapeSetOffset(self._shape, o)

    @property
    def offset(self) -> Vec2d:
        """Offset. (body space coordinates)"""
        raw = cp.cpCircleShapeGetOffset(self._shape)
        return Vec2d(raw.x, raw.y)
class Segment(Shape):
    """A line segment shape between two points

    Meant mainly as a static shape. Can be beveled in order to give them a
    thickness.
    """

    _pickle_attrs_init = Shape._pickle_attrs_init + ["a", "b", "radius"]

    def __init__(
        self,
        body: Optional["Body"],
        a: Tuple[float, float],
        b: Tuple[float, float],
        radius: float,
    ) -> None:
        """Create a Segment

        It is legal to send in None as body argument to indicate that this
        shape is not attached to a body. However, you must attach it to a body
        before adding the shape to a space or used for a space shape query.

        :param Body body: The body to attach the segment to
        :param a: The first endpoint of the segment
        :param b: The second endpoint of the segment
        :param float radius: The thickness of the segment
        """
        assert len(a) == 2
        assert len(b) == 2
        chipmunk_body = ffi.NULL if body is None else body._body
        self._init(body, cp.cpSegmentShapeNew(chipmunk_body, a, b, radius))

    @property
    def a(self) -> Vec2d:
        """The first of the two endpoints for this segment"""
        pt = cp.cpSegmentShapeGetA(self._shape)
        return Vec2d(pt.x, pt.y)

    @property
    def b(self) -> Vec2d:
        """The second of the two endpoints for this segment"""
        pt = cp.cpSegmentShapeGetB(self._shape)
        return Vec2d(pt.x, pt.y)

    def unsafe_set_endpoints(
        self, a: Tuple[float, float], b: Tuple[float, float]
    ) -> None:
        """Set the two endpoints for this segment

        .. note::
            This change is only picked up as a change to the position
            of the shape's surface, but not it's velocity. Changing it will
            not result in realistic physical behavior. Only use if you know
            what you are doing!
        """
        assert len(a) == 2
        assert len(b) == 2
        cp.cpSegmentShapeSetEndpoints(self._shape, a, b)

    @property
    def normal(self) -> Vec2d:
        """The normal"""
        n = cp.cpSegmentShapeGetNormal(self._shape)
        return Vec2d(n.x, n.y)

    def unsafe_set_radius(self, r: float) -> None:
        """Set the radius of the segment

        .. note::
            This change is only picked up as a change to the position
            of the shape's surface, but not it's velocity. Changing it will
            not result in realistic physical behavior. Only use if you know
            what you are doing!
        """
        cp.cpSegmentShapeSetRadius(self._shape, r)

    @property
    def radius(self) -> float:
        """The radius/thickness of the segment"""
        return cp.cpSegmentShapeGetRadius(self._shape)

    def set_neighbors(
        self, prev: Tuple[float, float], next: Tuple[float, float]
    ) -> None:
        """When you have a number of segment shapes that are all joined
        together, things can still collide with the "cracks" between the
        segments. By setting the neighbor segment endpoints you can tell
        Chipmunk to avoid colliding with the inner parts of the crack.
        """
        assert len(prev) == 2
        assert len(next) == 2
        cp.cpSegmentShapeSetNeighbors(self._shape, prev, next)
class Poly(Shape):
    """A convex polygon shape
    Slowest, but most flexible collision shape.
    """
    def __init__(
        self,
        body: Optional["Body"],
        vertices: Sequence[Tuple[float, float]],
        transform: Optional[Transform] = None,
        radius: float = 0,
    ) -> None:
        """Create a polygon.
        A convex hull will be calculated from the vertexes automatically.
        Adding a small radius will bevel the corners and can significantly
        reduce problems where the poly gets stuck on seams in your geometry.
        It is legal to send in None as body argument to indicate that this
        shape is not attached to a body. However, you must attach it to a body
        before adding the shape to a space or used for a space shape query.
        .. note::
            Make sure to put the vertices around (0,0) or the shape might
            behave strange.
        Either directly place the vertices like the below example:
        >>> import pymunk
        >>> w, h = 10, 20
        >>> vs = [(-w/2,-h/2), (w/2,-h/2), (w/2,h/2), (-w/2,h/2)]
        >>> poly_good = pymunk.Poly(None, vs)
        >>> print(poly_good.center_of_gravity)
        Vec2d(0.0, 0.0)
        Or use a transform to move them:
        >>> import pymunk
        >>> width, height = 10, 20
        >>> vs = [(0, 0), (width, 0), (width, height), (0, height)]
        >>> poly_bad = pymunk.Poly(None, vs)
        >>> print(poly_bad.center_of_gravity)
        Vec2d(5.0, 10.0)
        >>> t = pymunk.Transform(tx=-width/2, ty=-height/2)
        >>> poly_good = pymunk.Poly(None, vs, transform=t)
        >>> print(poly_good.center_of_gravity)
        Vec2d(0.0, 0.0)
        :param Body body: The body to attach the poly to
        :param [(float,float)] vertices: Define a convex hull of the polygon
        with a counterclockwise winding.
        :param Transform transform: Transform will be applied to every vertex.
        :param float radius: Set the radius of the poly shape
        """
        # No transform given: use the identity transform so the vertices are
        # taken as-is.
        if transform is None:
            transform = Transform.identity()
        # A shape without a body is represented as NULL on the C side.
        body_body = ffi.NULL if body is None else body._body
        _shape = cp.cpPolyShapeNew(
            body_body, len(vertices), vertices, transform, radius
        )
        self._init(body, _shape)
    def unsafe_set_radius(self, radius: float) -> None:
        """Unsafe set the radius of the poly.
        .. note::
            This change is only picked up as a change to the position
            of the shape's surface, but not it's velocity. Changing it will
            not result in realistic physical behavior. Only use if you know
            what you are doing!
        """
        cp.cpPolyShapeSetRadius(self._shape, radius)
    @property
    def radius(self) -> float:
        """The radius of the poly shape.
        Extends the poly in all directions with the given radius.
        """
        return cp.cpPolyShapeGetRadius(self._shape)
    @staticmethod
    def create_box(
        body: Optional["Body"], size: Tuple[float, float] = (10, 10), radius: float = 0
    ) -> "Poly":
        """Convenience function to create a box given a width and height.
        The boxes will always be centered at the center of gravity of the
        body you are attaching them to. If you want to create an off-center
        box, you will need to use the normal constructor Poly(...).
        Adding a small radius will bevel the corners and can significantly
        reduce problems where the box gets stuck on seams in your geometry.
        :param Body body: The body to attach the poly to
        :param size: Size of the box as (width, height)
        :type size: (`float, float`)
        :param float radius: Radius of poly
        :rtype: :py:class:`Poly`
        """
        # Bypass Poly.__init__ (which builds the shape from a vertex list)
        # and construct the C-side box shape directly.
        self = Poly.__new__(Poly)
        body_body = ffi.NULL if body is None else body._body
        _shape = cp.cpBoxShapeNew(body_body, size[0], size[1], radius)
        self._init(body, _shape)
        return self
    @staticmethod
    def create_box_bb(body: Optional["Body"], bb: BB, radius: float = 0) -> "Poly":
        """Convenience function to create a box shape from a :py:class:`BB`.
        The boxes will always be centered at the center of gravity of the
        body you are attaching them to. If you want to create an off-center
        box, you will need to use the normal constructor Poly(..).
        Adding a small radius will bevel the corners and can significantly
        reduce problems where the box gets stuck on seams in your geometry.
        :param Body body: The body to attach the poly to
        :param BB bb: Size of the box
        :param float radius: Radius of poly
        :rtype: :py:class:`Poly`
        """
        # Same __new__/_init construction trick as create_box, but sized
        # from a bounding box instead of (width, height).
        self = Poly.__new__(Poly)
        body_body = ffi.NULL if body is None else body._body
        _shape = cp.cpBoxShapeNew2(body_body, bb, radius)
        self._init(body, _shape)
        return self
    def get_vertices(self) -> List[Vec2d]:
        """Get the vertices in local coordinates for the polygon
        If you need the vertices in world coordinates then the vertices can be
        transformed by adding the body position and each vertex rotated by the
        body rotation in the following way::
        >>> import pymunk
        >>> b = pymunk.Body()
        >>> b.position = 1,2
        >>> b.angle = 0.5
        >>> shape = pymunk.Poly(b, [(0,0), (10,0), (10,10)])
        >>> for v in shape.get_vertices():
        ...     x,y = v.rotated(shape.body.angle) + shape.body.position
        ...     (int(x), int(y))
        (1, 2)
        (9, 6)
        (4, 15)
        :return: The vertices in local coords
        :rtype: [:py:class:`Vec2d`]
        """
        verts = []
        # Copy each C-side vertex into a Python Vec2d.
        l = cp.cpPolyShapeGetCount(self._shape)
        for i in range(l):
            v = cp.cpPolyShapeGetVert(self._shape, i)
            verts.append(Vec2d(v.x, v.y))
        return verts
    def unsafe_set_vertices(
        self,
        vertices: Sequence[Tuple[float, float]],
        transform: Optional[Transform] = None,
    ) -> None:
        """Unsafe set the vertices of the poly.
        .. note::
            This change is only picked up as a change to the position
            of the shape's surface, but not it's velocity. Changing it will
            not result in realistic physical behavior. Only use if you know
            what you are doing!
        """
        # Without a transform, pass the raw vertices straight through.
        if transform is None:
            cp.cpPolyShapeSetVertsRaw(self._shape, len(vertices), vertices)
            return
        cp.cpPolyShapeSetVerts(self._shape, len(vertices), vertices, transform)
    def __getstate__(self) -> _State:
        """Return the state of this object
        This method allows the usage of the :mod:`copy` and :mod:`pickle`
        modules with this class.
        """
        d = super(Poly, self).__getstate__()
        # Vertices are stored on the C side, so they must be extracted and
        # persisted explicitly; the transform was already applied at init.
        d["init"].append(("vertices", self.get_vertices()))
        d["init"].append(("transform", None))
        d["init"].append(("radius", self.radius))
        return d
| mit | 6bd401bb44366eb8a289133c421d4230 | 32.881649 | 88 | 0.583186 | 3.880445 | false | false | false | false |
viblo/pymunk | pymunk/matplotlib_util.py | 1 | 3667 | """This submodule contains helper functions to help with quick prototyping
using pymunk together with pyglet.
Intended to help with debugging and prototyping, not for actual production use
in a full application. The methods contained in this module is opinionated
about your coordinate system and not very optimized (they use batched
drawing, but there is probably room for optimizations still).
"""
__docformat__ = "reStructuredText"
from typing import TYPE_CHECKING, Any, Sequence
import matplotlib.pyplot as plt # type: ignore
import pymunk
from pymunk.space_debug_draw_options import SpaceDebugColor
from pymunk.vec2d import Vec2d
if TYPE_CHECKING:
import matplotlib as mpl
class DrawOptions(pymunk.SpaceDebugDrawOptions):
    def __init__(self, ax: Any) -> None:
        """DrawOptions for space.debug_draw() to draw a space on a ax object.

        Typical usage::

        >>> import matplotlib as mpl
        >>> import matplotlib.pyplot as plt
        >>> import pymunk
        >>> import pymunk.matplotlib_util
        >>> space = pymunk.Space()
        >>> ax = plt.subplot()
        >>> options = pymunk.matplotlib_util.DrawOptions(ax)
        >>> space.debug_draw(options)

        You can control the color of a Shape by setting shape.color to the color
        you want it drawn in.

        >>> shape = pymunk.Circle(space.static_body, 10)
        >>> shape.color = (1, 0, 0, 1) # will draw shape in red

        See matplotlib_util.demo.py for a full example

        :Param:
                ax: matplotlib.Axes
                        A matplotlib Axes object.
        """
        super(DrawOptions, self).__init__()
        self.ax = ax

    def draw_circle(
        self,
        pos: Vec2d,
        angle: float,
        radius: float,
        outline_color: SpaceDebugColor,
        fill_color: SpaceDebugColor,
    ) -> None:
        patch = plt.Circle(
            pos,
            radius,
            facecolor=fill_color.as_float(),
            edgecolor=outline_color.as_float(),
        )
        self.ax.add_patch(patch)

        # Orientation marker: a line from the center out to the rim.
        rim = pos + Vec2d(radius, 0).rotated(angle)
        marker = plt.Line2D(
            [pos.x, rim.x],
            [pos.y, rim.y],
            linewidth=1,
            color=outline_color.as_float(),
        )
        marker.set_solid_capstyle("round")
        self.ax.add_line(marker)

    def draw_segment(self, a: Vec2d, b: Vec2d, color: SpaceDebugColor) -> None:
        segment = plt.Line2D([a.x, b.x], [a.y, b.y], linewidth=1, color=color.as_float())
        segment.set_solid_capstyle("round")
        self.ax.add_line(segment)

    def draw_fat_segment(
        self,
        a: Vec2d,
        b: Vec2d,
        radius: float,
        outline_color: SpaceDebugColor,
        fill_color: SpaceDebugColor,
    ) -> None:
        # Approximate the segment thickness with the line width (min 1).
        radius = max(1, 2 * radius)
        segment = plt.Line2D(
            [a.x, b.x], [a.y, b.y], linewidth=radius, color=fill_color.as_float()
        )
        segment.set_solid_capstyle("round")
        self.ax.add_line(segment)

    def draw_polygon(
        self,
        verts: Sequence[Vec2d],
        radius: float,
        outline_color: SpaceDebugColor,
        fill_color: SpaceDebugColor,
    ) -> None:
        # Approximate the bevel radius with the edge line width (min 1).
        radius = max(1, 2 * radius)
        patch = plt.Polygon(
            verts,
            linewidth=radius,
            joinstyle="round",
            facecolor=fill_color.as_float(),
            edgecolor=outline_color.as_float(),
        )
        self.ax.add_patch(patch)

    def draw_dot(self, size: float, pos: Vec2d, color: SpaceDebugColor) -> None:
        dot = plt.Circle(pos, size, facecolor=color.as_float(), edgecolor="None")
        self.ax.add_patch(dot)
| mit | 13ff98338596412faa4d71f818dc786a | 29.305785 | 86 | 0.589583 | 3.627102 | false | false | false | false |
viblo/pymunk | pymunk/bb.py | 1 | 3510 | __docformat__ = "reStructuredText"
from typing import NamedTuple, Tuple
from . import _chipmunk_cffi
lib = _chipmunk_cffi.lib
ffi = _chipmunk_cffi.ffi
from .vec2d import Vec2d
class BB(NamedTuple):
    """Simple axis-aligned 2D bounding box.
    Stored as left, bottom, right, top values.
    An instance can be created in this way:
    >>> BB(left=1, bottom=5, right=20, top=10)
    BB(left=1, bottom=5, right=20, top=10)
    Or partially, for example like this:
    >>> BB(right=5, top=10)
    BB(left=0, bottom=0, right=5, top=10)
    """

    left: float = 0
    bottom: float = 0
    right: float = 0
    top: float = 0

    @staticmethod
    def newForCircle(p: Tuple[float, float], r: float) -> "BB":
        """Convenience constructor for making a BB fitting a circle at
        position p with radius r.
        """
        raw = lib.cpBBNewForCircle(p, r)
        return BB(raw.l, raw.b, raw.r, raw.t)

    def intersects(self, other: "BB") -> bool:
        """Returns true if the bounding boxes intersect"""
        return bool(lib.cpBBIntersects(self, other))

    def intersects_segment(
        self, a: Tuple[float, float], b: Tuple[float, float]
    ) -> bool:
        """Returns true if the segment defined by endpoints a and b
        intersect this bb."""
        assert len(a) == 2
        assert len(b) == 2
        return bool(lib.cpBBIntersectsSegment(self, a, b))

    def contains(self, other: "BB") -> bool:
        """Returns true if bb completley contains the other bb"""
        return bool(lib.cpBBContainsBB(self, other))

    def contains_vect(self, v: Tuple[float, float]) -> bool:
        """Returns true if this bb contains the vector v"""
        assert len(v) == 2
        return bool(lib.cpBBContainsVect(self, v))

    def merge(self, other: "BB") -> "BB":
        """Return the minimal bounding box that contains both this bb and the
        other bb
        """
        merged = lib.cpBBMerge(self, other)
        return BB(merged.l, merged.b, merged.r, merged.t)

    def expand(self, v: Tuple[float, float]) -> "BB":
        """Return the minimal bounding box that contans both this bounding box
        and the vector v
        """
        expanded = lib.cpBBExpand(self, tuple(v))
        return BB(expanded.l, expanded.b, expanded.r, expanded.t)

    def center(self) -> Vec2d:
        """Return the center"""
        c = lib.cpBBCenter(self)
        return Vec2d(c.x, c.y)

    def area(self) -> float:
        """Return the area"""
        return lib.cpBBArea(self)

    def merged_area(self, other: "BB") -> float:
        """Merges this and other then returns the area of the merged bounding
        box.
        """
        return lib.cpBBMergedArea(self, other)

    def segment_query(self, a: Tuple[float, float], b: Tuple[float, float]) -> float:
        """Returns the fraction along the segment query the BB is hit.
        Returns infinity if it doesnt hit
        """
        assert len(a) == 2
        assert len(b) == 2
        return lib.cpBBSegmentQuery(self, a, b)

    def clamp_vect(self, v: Tuple[float, float]) -> Vec2d:
        """Returns a copy of the vector v clamped to the bounding box"""
        assert len(v) == 2
        clamped = lib.cpBBClampVect(self, v)
        return Vec2d(clamped.x, clamped.y)

    # Kept from the original source, disabled pending a binding for
    # cpBBWrapVect:
    # def wrap_vect(self, v):
    #     """Returns a copy of v wrapped to the bounding box.
    #     That is, BB(0,0,10,10).wrap_vect((5,5)) == Vec2d._fromcffi(10,10)
    #     """
    #     return lib._cpBBWrapVect(self.cp_bb[0], v)
| mit | c6de0e771b52daa87023e4714b1d05cc | 30.061947 | 85 | 0.5849 | 3.346044 | false | false | false | false |
viblo/pymunk | dump/pyramid_bench.py | 1 | 2877 | """Basic benchmark of a pyramid of boxes
Results with 10000 iterations (lower is better)
python 2.6: 186.8 sec
pypy-1.9: 428.9 sec
"""
import timeit
import pymunk
from pymunk import Vec2d
class PyramidDemo:
    """Benchmark scene: a 25-row pyramid of boxes dropped onto a static
    segment floor, stepped for a fixed number of iterations with a fake
    draw pass to simulate a full game loop."""
    def flipyv(self, v):
        # Convert from pymunk coordinates (y up) to screen coordinates (y down).
        return v[0], -v[1]+self.h
    def __init__(self):
        self.running = True
        self.drawing = True
        self.w, self.h = 600,600
        ### Init pymunk and create space
        self.space = pymunk.Space()
        self.space.gravity = (0.0, -900.0)
        ### ground
        body = pymunk.Body()
        shape = pymunk.Segment(body, (50, 100), (550,100), .0)
        shape.friction = 1.0
        self.space.add(shape)
        ### pyramid
        # x walks up the left edge of the pyramid; y walks along each row.
        x=Vec2d(-100, 7.5) + (300,100)
        y=Vec2d(0,0)
        deltaX=Vec2d(0.5625, 2.0)*10
        deltaY=Vec2d(1.125, 0.0)*10
        for i in range(25):
            y = Vec2d(x)
            for j in range(i, 25):
                size= 5
                points = [(-size, -size), (-size, size), (size,size), (size, -size)]
                mass = 1.0
                moment = pymunk.moment_for_poly(mass, points, (0,0))
                body = pymunk.Body(mass, moment)
                body.position = y
                shape = pymunk.Poly(body, points, (0,0))
                shape.friction = 1
                self.space.add(body,shape)
                y += deltaY
            x += deltaX
    def run(self):
        # Fixed workload for the benchmark: 10000 frames.
        for x in range(10000):
            self.loop()
    def loop(self):
        # Sub-step the physics 3 times per frame at 120 Hz.
        steps = 3
        dt = 1.0/120.0/steps
        for x in range(steps):
            self.space.step(dt)
        self.draw()
    def draw(self):
        #simulate drawing
        # No actual rendering; just perform the per-shape work a renderer
        # would do (coordinate transforms / vertex extraction).
        for shape in self.space.shapes:
            if shape.body.is_static:
                body = shape.body
                pv1 = self.flipyv(body.position + shape.a.cpvrotate(body.rotation_vector))
                pv2 = self.flipyv(body.position + shape.b.cpvrotate(body.rotation_vector))
            else:
                if shape.body.is_sleeping:
                    continue
                ps = shape.get_vertices()
                ps.append(ps[0])
def main():
    """Time one full PyramidDemo run and print the elapsed seconds."""
    timer = timeit.Timer('demo.run()', "gc.enable(); from __main__ import PyramidDemo;demo = PyramidDemo()")
    print(timer.timeit(number=1))
if __name__ == '__main__':
    # Set doprof to 1 to run the benchmark under cProfile instead of the
    # plain timeit run.
    doprof = 0
    if not doprof:
        main()
    else:
        import cProfile
        import pstats
        prof = cProfile.run("main()", "profile.prof")
        stats = pstats.Stats("profile.prof")
        stats.strip_dirs()
        stats.sort_stats('cumulative', 'time', 'calls')
        stats.print_stats(30)
openelections/openelections-core | openelex/tasks/datasource.py | 1 | 3581 | from __future__ import print_function
import csv
from pprint import pprint
import inspect
import sys
import click
from openelex.base.datasource import MAPPING_FIELDNAMES
from .utils import default_state_options, load_module
def handle_task(task, state, datefilter):
    """Call Datasource methods dynamically based on task function name.

    Exits the process with an explanatory message when the state module or
    its datasource submodule cannot be imported.
    """
    module_name = "openelex.us.%s" % state
    err_msg = "%s module could not be imported. Does it exist?"
    try:
        state_module = load_module(state, ['datasource'])
    except ImportError:
        sys.exit(err_msg % module_name)
    try:
        source = state_module.datasource.Datasource()
    except AttributeError:
        sys.exit(err_msg % (module_name + ".datasource.py"))
    # The task name matches a Datasource method (e.g. "mappings").
    return getattr(source, task)(datefilter)
def pprint_results(func_name, results):
    """Pretty-print each result, followed by a summary count."""
    for item in results:
        pprint(item)
    print("\n%s returned %s results" % (func_name, len(results)))
def csv_results(results):
    """Write mapping results as CSV to stdout, standard fields first."""
    # Mappings can carry state-specific extra fields, so scan every row for
    # the full set of keys actually present.
    seen_fields = set()
    for row in results:
        seen_fields.update(row.keys())
    # Known fieldnames lead the CSV header; any extras follow.
    fieldnames = list(MAPPING_FIELDNAMES)
    fieldnames.extend(seen_fields.difference(set(MAPPING_FIELDNAMES)))
    writer = csv.DictWriter(sys.stdout, fieldnames=fieldnames)
    writer.writeheader()
    for row in results:
        writer.writerow(row)
@click.command(name='datasource.target_urls', help="List source data urls for a state")
@default_state_options
def target_urls(state, datefilter=''):
    """
    List source data urls for a state.

    State is required. Optionally provide 'datefilter' to limit results.
    """
    # This function's own name doubles as the Datasource method to invoke.
    task_name = inspect.stack()[0][3]
    pprint_results(task_name, handle_task(task_name, state, datefilter))
@click.command(name='datasource.mappings', help="List metadata mappings for a state")
@default_state_options
def mappings(state, datefilter='', csvout=False):
    """
    List metadata mappings for a state.

    State is required. Optionally provide 'datefilter' to limit results.
    """
    # This function's own name doubles as the Datasource method to invoke.
    task_name = inspect.stack()[0][3]
    results = handle_task(task_name, state, datefilter)
    if csvout:
        csv_results(results)
    else:
        pprint_results(task_name, results)
@click.command(name='datasource.elections', help="List elections for a state.")
@default_state_options
def elections(state, datefilter=''):
    """
    List elections for a state. This data comes from the OpenElex Metadata API.

    State is required. Optionally provide 'datefilter' to limit results.
    """
    # This function's own name doubles as the Datasource method to invoke.
    task_name = inspect.stack()[0][3]
    results = handle_task(task_name, state, datefilter)
    # Results are grouped by year; print each group and tally the total.
    total = 0
    for elecs in results.values():
        total += len(elecs)
        pprint(elecs)
    print("\n%s returned %s results" % (task_name, total))
@click.command(name='datasource.filename_url_pairs', help="List mapping of standard filenames to source urls for a state")
@default_state_options
def filename_url_pairs(state, datefilter=''):
    """
    List mapping of standard filenames to source urls for a state

    State is required. Optionally provide 'datefilter' to limit results.
    """
    func_name = inspect.stack()[0][3]
    # Bug fix: handle_task was previously called twice in a row, doing all of
    # the datasource work (including any network access a state's Datasource
    # performs) a second time and discarding the first result.
    results = handle_task(func_name, state, datefilter)
    pprint_results(func_name, results)
| mit | 62dbb92b3f6414554eb29510464caa28 | 32.783019 | 122 | 0.687797 | 3.741902 | false | false | false | false |
viblo/pymunk | pymunk/examples/arrows.py | 1 | 7410 | """Showcase of flying arrows that can stick to objects in a somewhat
realistic looking way.
"""
import sys
from typing import List
import pygame
import pymunk
import pymunk.pygame_util
from pymunk.vec2d import Vec2d
def create_arrow():
    """Build a fresh (body, shape) pair for an arrow.

    The body starts out KINEMATIC so it can ride along with the cannon;
    it is switched to DYNAMIC when fired.
    """
    vertices = [(-30, 0), (0, 3), (10, 0), (0, -3)]
    arrow_body = pymunk.Body(body_type=pymunk.Body.KINEMATIC)
    arrow_shape = pymunk.Poly(arrow_body, vertices)
    arrow_shape.friction = 0.5
    arrow_shape.collision_type = 1
    arrow_shape.density = 0.1
    return arrow_body, arrow_shape
def stick_arrow_to_target(space, arrow_body, target_body, position, flying_arrows):
    """Pin the arrow to the body it hit at the impact position.

    A pivot joint fixes the impact point and a gear joint locks the relative
    rotation, so the arrow stays stuck at the angle it hit with. Runs as a
    post-step callback scheduled by post_solve_arrow_hit.
    """
    pivot_joint = pymunk.PivotJoint(arrow_body, target_body, position)
    phase = target_body.angle - arrow_body.angle
    gear_joint = pymunk.GearJoint(arrow_body, target_body, phase, 1)
    space.add(pivot_joint)
    space.add(gear_joint)
    try:
        flying_arrows.remove(arrow_body)
    except ValueError:
        # Bug fix: the original bare ``except:`` swallowed every exception
        # (including KeyboardInterrupt/SystemExit). Only the expected case -
        # the arrow already removed, e.g. after hitting two things in the
        # same step - should be ignored.
        pass
def post_solve_arrow_hit(arbiter, space, data):
    """Collision post-solve callback for arrow (type 1) vs world (type 0).

    For hard-enough impacts, schedules a post-step callback that pins the
    arrow to whatever it hit - presumably because the space cannot be
    modified from inside a collision callback (verify against pymunk docs).
    """
    if arbiter.total_impulse.length > 300:
        a, b = arbiter.shapes
        position = arbiter.contact_point_set.points[0].point_a
        # Neutralize the arrow shape so it cannot trigger this handler or
        # collide with the other group-1 shapes again.
        b.collision_type = 0
        b.group = 1
        other_body = a.body
        arrow_body = b.body
        space.add_post_step_callback(
            stick_arrow_to_target,
            arrow_body,
            other_body,
            position,
            data["flying_arrows"],
        )
width, height = 690, 600
def main():
    """Run the arrows demo: aim a cannon with the mouse, fire arrows that
    stick to what they hit. Returns when the window is closed."""
    ### PyGame init
    pygame.init()
    screen = pygame.display.set_mode((width, height))
    clock = pygame.time.Clock()
    running = True
    font = pygame.font.SysFont("Arial", 16)
    ### Physics stuff
    space = pymunk.Space()
    space.gravity = 0, 1000
    draw_options = pymunk.pygame_util.DrawOptions(screen)
    # walls - the left-top-right walls
    static: List[pymunk.Shape] = [
        pymunk.Segment(space.static_body, (50, 550), (50, 50), 5),
        pymunk.Segment(space.static_body, (50, 50), (650, 50), 5),
        pymunk.Segment(space.static_body, (650, 50), (650, 550), 5),
        pymunk.Segment(space.static_body, (50, 550), (650, 550), 5),
    ]
    # A kinematic circle obstacle for arrows to stick to.
    b2 = pymunk.Body(body_type=pymunk.Body.KINEMATIC)
    static.append(pymunk.Circle(b2, 30))
    b2.position = 300, 200
    for s in static:
        s.friction = 1.0
        s.group = 1
    space.add(b2, *static)
    # "Cannon" that can fire arrows
    cannon_body = pymunk.Body(body_type=pymunk.Body.KINEMATIC)
    cannon_shape = pymunk.Circle(cannon_body, 25)
    cannon_shape.sensor = True
    cannon_shape.color = (255, 50, 50, 255)
    cannon_body.position = 100, 500
    space.add(cannon_body, cannon_shape)
    # The "loaded" arrow that rides along with the cannon until fired.
    arrow_body, arrow_shape = create_arrow()
    space.add(arrow_body, arrow_shape)
    flying_arrows: List[pymunk.Body] = []
    # Arrows have collision_type 1; everything else defaults to 0.
    handler = space.add_collision_handler(0, 1)
    handler.data["flying_arrows"] = flying_arrows
    handler.post_solve = post_solve_arrow_hit
    start_time = 0
    while running:
        for event in pygame.event.get():
            if (
                event.type == pygame.QUIT
                or event.type == pygame.KEYDOWN
                and (event.key in [pygame.K_ESCAPE, pygame.K_q])
            ):
                running = False
            elif event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                # Start charging the shot; power scales with hold time.
                start_time = pygame.time.get_ticks()
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_p:
                pygame.image.save(screen, "arrows.png")
            elif event.type == pygame.MOUSEBUTTONUP and event.button == 1:
                # Fire: hold time (clamped to 10..1000 ms) sets the impulse.
                end_time = pygame.time.get_ticks()
                diff = end_time - start_time
                power = max(min(diff, 1000), 10) * 13.5
                impulse = power * Vec2d(1, 0)
                impulse = impulse.rotated(arrow_body.angle)
                # The loaded arrow becomes a free dynamic body...
                arrow_body.body_type = pymunk.Body.DYNAMIC
                arrow_body.apply_impulse_at_world_point(impulse, arrow_body.position)
                # space.add(arrow_body)
                flying_arrows.append(arrow_body)
                # ...and a new arrow is loaded into the cannon.
                arrow_body, arrow_shape = create_arrow()
                space.add(arrow_body, arrow_shape)
        keys = pygame.key.get_pressed()
        speed = 2.5
        if keys[pygame.K_UP]:
            cannon_body.position += Vec2d(0, 1) * speed
        if keys[pygame.K_DOWN]:
            cannon_body.position += Vec2d(0, -1) * speed
        if keys[pygame.K_LEFT]:
            cannon_body.position += Vec2d(-1, 0) * speed
        if keys[pygame.K_RIGHT]:
            cannon_body.position += Vec2d(1, 0) * speed
        mouse_position = pymunk.pygame_util.from_pygame(
            Vec2d(*pygame.mouse.get_pos()), screen
        )
        cannon_body.angle = (mouse_position - cannon_body.position).angle
        # move the unfired arrow together with the cannon
        arrow_body.position = cannon_body.position + Vec2d(
            cannon_shape.radius + 40, 0
        ).rotated(cannon_body.angle)
        arrow_body.angle = cannon_body.angle
        # print(arrow_body.angle)
        # Simplified aerodynamics: drag grows when the arrow is not aligned
        # with its velocity, and is applied at the tail so the arrow turns
        # to point along its flight path.
        for flying_arrow in flying_arrows:
            drag_constant = 0.0002
            pointing_direction = Vec2d(1, 0).rotated(flying_arrow.angle)
            # print(pointing_direction.angle, flying_arrow.angle)
            flight_direction = Vec2d(*flying_arrow.velocity)
            flight_direction, flight_speed = flight_direction.normalized_and_length()
            dot = flight_direction.dot(pointing_direction)
            # (1-abs(dot)) can be replaced with (1-dot) to make arrows turn
            # around even when fired straight up. Might not be as accurate, but
            # maybe look better.
            drag_force_magnitude = (
                (1 - abs(dot)) * flight_speed ** 2 * drag_constant * flying_arrow.mass
            )
            arrow_tail_position = flying_arrow.position + Vec2d(-50, 0).rotated(
                flying_arrow.angle
            )
            flying_arrow.apply_impulse_at_world_point(
                drag_force_magnitude * -flight_direction, arrow_tail_position
            )
            flying_arrow.angular_velocity *= 0.5
        ### Clear screen
        screen.fill(pygame.Color("black"))
        ### Draw stuff
        space.debug_draw(draw_options)
        # draw(screen, space)
        # Power meter
        if pygame.mouse.get_pressed()[0]:
            current_time = pygame.time.get_ticks()
            diff = current_time - start_time
            power = max(min(diff, 1000), 10)
            h = power // 2
            pygame.draw.line(screen, pygame.Color("red"), (30, 550), (30, 550 - h), 10)
        # Info and flip screen
        screen.blit(
            font.render("fps: " + str(clock.get_fps()), True, pygame.Color("white")),
            (0, 0),
        )
        screen.blit(
            font.render(
                "Aim with mouse, hold LMB to powerup, release to fire",
                True,
                pygame.Color("darkgrey"),
            ),
            (5, height - 35),
        )
        screen.blit(
            font.render("Press ESC or Q to quit", True, pygame.Color("darkgrey")),
            (5, height - 20),
        )
        pygame.display.flip()
        ### Update physics
        fps = 60
        dt = 1.0 / fps
        space.step(dt)
        clock.tick(fps)
clock.tick(fps)
if __name__ == "__main__":
sys.exit(main())
| mit | f3fa63b2823f8a56536c85fb6e1c4ea1 | 31.933333 | 87 | 0.57193 | 3.408464 | false | false | false | false |
viblo/pymunk | dump/many_crash.py | 1 | 1387 | import gc
import multiprocessing
import random
import pymunk
random.seed(0)
loops = 20
num_objects = 200
d = {}
def f(x):
    """One stress-test iteration: build a randomized space and step it.

    random is seeded at module import, so the sequence of random calls (and
    thus the generated scenario) is reproducible.

    NOTE(review): the loop variables below shadow the parameter ``x``; the
    iteration number is only used for the progress print.
    """
    print(f"loop {x}/{loops}")
    s = pymunk.Space()
    for x in range(num_objects):
        b = pymunk.Body(10, 20)
        # Lazily-constructed shape factories; one is picked at random.
        c = lambda: pymunk.Circle(b, 10)
        e = lambda: pymunk.Segment(b, (-10, 0), (10, 0), 5)
        p = lambda: pymunk.Poly.create_box(b)
        shape = random.choice([c, e, p])()
        # Occasionally keep an extra reference in the module-level dict (and
        # occasionally drop one) to exercise object lifetime handling.
        if random.random() > 0.9:
            o = random.choice([shape, b])
            d[o] = True
        if random.random() > 0.99 and len(d) > 10:
            k = random.choice(list(d.keys()))
            del d[k]
        b.position = random.randint(0, 100), random.randint(0, 100)
        # if random.random() > 0.99:
        #     gc.collect()
        s.add(b, shape)
    # Randomly pin pairs of distinct bodies together.
    for x in range(num_objects):
        a, b = random.choices(s.bodies, k=2)
        if a == b:
            continue
        c = pymunk.PinJoint(a, b)
        s.add(c)
    for x in range(100):
        s.step(0.02)
    # if random.random() > 0.75:
    #     gc.collect()
def start_pool():
    """Pool initializer: announce which worker process is starting."""
    worker_name = multiprocessing.current_process().name
    print("Starting", worker_name)
if __name__ == "__main__":
for x in range(4):
with multiprocessing.Pool(processes=4, initializer=start_pool) as pool:
r = pool.map(f, range(loops))
# print(len(d))
print("done")
| mit | a49d28b59c2f2a91107675e57d0e79ae | 20.015152 | 79 | 0.524153 | 3.075388 | false | false | false | false |
viblo/pymunk | pymunk/tests/test_shape_filter.py | 1 | 1109 | import pickle
import unittest
import pymunk as p
class UnitTestShapeFilter(unittest.TestCase):
    def testInit(self) -> None:
        # Default filter: group 0, all category and mask bits set.
        default_filter = p.ShapeFilter()
        self.assertEqual(default_filter.group, 0)
        self.assertEqual(default_filter.categories, 0xFFFFFFFF)
        self.assertEqual(default_filter.mask, 0xFFFFFFFF)
        # Positional arguments map to (group, categories, mask).
        custom_filter = p.ShapeFilter(1, 2, 3)
        self.assertEqual(custom_filter.group, 1)
        self.assertEqual(custom_filter.categories, 2)
        self.assertEqual(custom_filter.mask, 3)

    def testConstants(self) -> None:
        # Both convenience constants mean "all 32 bits set".
        self.assertEqual(p.ShapeFilter.ALL_MASKS(), 0xFFFFFFFF)
        self.assertEqual(p.ShapeFilter.ALL_CATEGORIES(), 0xFFFFFFFF)

    def testEq(self) -> None:
        # Equality is by field values, not identity.
        self.assertTrue(p.ShapeFilter(1, 2, 3) == p.ShapeFilter(1, 2, 3))
        self.assertTrue(p.ShapeFilter(1, 2, 3) != p.ShapeFilter(2, 3, 4))

    def testPickle(self) -> None:
        # A filter survives a pickle round trip unchanged.
        original = p.ShapeFilter(1, 2, 3)
        restored = pickle.loads(pickle.dumps(original, 2))
        self.assertEqual(original, restored)
class UnitTestContactPoint(unittest.TestCase):
    # Placeholder test case: no ContactPoint-specific tests implemented yet.
    pass
class UnitTestContactPointSet(unittest.TestCase):
    # Placeholder test case: no ContactPointSet-specific tests implemented yet.
    pass
| mit | 0d39d8c6ff2176294b6b9d3c5dd77da2 | 25.404762 | 68 | 0.625789 | 3.340361 | false | true | false | false |
openelections/openelections-core | openelex/us/ar/datasource.py | 1 | 9828 | from future import standard_library
standard_library.install_aliases()
import os.path
import re
import urllib.parse
from bs4 import BeautifulSoup
import requests
import unicodecsv
from openelex.base.datasource import BaseDatasource
from openelex.lib import build_github_url
from openelex.lib.text import ocd_type_id
class Datasource(BaseDatasource):
    """Map Arkansas raw results files to standardized filenames/metadata.

    Results come from three places: the SOS results portal, zipped special
    election files, and the Clarity election-night reporting system.
    """

    RESULTS_PORTAL_URL = "http://www.sos.arkansas.gov/electionresults/index.php"
    CLARITY_PORTAL_URL = "http://results.enr.clarityelections.com/AR/"

    # There aren't precinct-level results for these, just a CSV file with
    # summary data for the county.
    no_precinct_urls = [
        "http://results.enr.clarityelections.com/AR/Columbia/42858/111213/en/summary.html",
        "http://results.enr.clarityelections.com/AR/Ouachita/42896/112694/en/summary.html",
        "http://results.enr.clarityelections.com/AR/Union/42914/112664/en/summary.html",
    ]

    def mappings(self, year=None):
        """Return a list of metadata dicts (url, standardized filename, ...)
        for every election, optionally filtered by year."""
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        """Return source data URLs, optionally filtered by year."""
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        # Fetch from the pre-processed URL when one exists (see _url_for_fetch).
        return [(item['generated_filename'], self._url_for_fetch(item))
                for item in self.mappings(year)]

    def unprocessed_filename_url_pairs(self, year=None):
        # For pre-processed (CSV) entries, also expose the original PDF.
        return [(item['generated_filename'].replace(".csv", ".pdf"), item['raw_url'])
                for item in self.mappings(year)
                if 'pre_processed_url' in item]

    def mappings_for_url(self, url):
        """Return all mapping entries whose raw_url matches ``url``."""
        return [mapping for mapping in self.mappings() if mapping['raw_url'] == url]

    def _build_metadata(self, year, elections):
        # Flatten per-election metadata entries for one year.
        meta_entries = []
        for election in elections:
            meta_entries.extend(self._build_election_metadata(election))
        return meta_entries

    def _build_election_metadata(self, election):
        """
        Return a list of metadata entries for a single election.

        Dispatches on the election slug / result-source URL to the
        appropriate builder.
        """
        slug = election['slug']
        link = election['direct_links'][0]
        if slug == 'ar-2000-11-07-general':
            return self._build_election_metadata_2000_general(election)
        elif slug in ('ar-2000-11-07-special-general',
                'ar-2001-09-25-special-primary',
                'ar-2001-10-16-special-primary-runoff',
                'ar-2001-11-20-special-general'):
            return self._build_election_metadata_zipped_special(election)
        elif link.startswith(self.CLARITY_PORTAL_URL):
            return self._build_election_metadata_clarity(election)
        else:
            return self._build_election_metadata_default(election)

    def _build_election_metadata_default(self, election):
        """Build a single statewide metadata entry for portal/PDF results."""
        link = election['direct_links'][0]
        filename_kwargs = {}
        if link.startswith(self.RESULTS_PORTAL_URL):
            # Report portal results are precinct-level
            filename_kwargs['reporting_level'] = 'precinct'
            # And the format is tab-delimited text
            filename_kwargs['extension'] = '.tsv'
        generated_filename = self._standardized_filename(election, **filename_kwargs)
        mapping = {
            "generated_filename": generated_filename,
            "raw_url": link,
            "ocd_id": 'ocd-division/country:us/state:ar',
            "name": 'Arkansas',
            "election": election['slug']
        }
        if "2002" in election['slug']:
            # 2002 results were PDFs that have been pre-processed into CSVs
            # hosted on GitHub.
            generated_filename = generated_filename.replace('.pdf', '.csv')
            mapping['pre_processed_url'] = build_github_url(self.state,
                generated_filename)
            mapping['generated_filename'] = generated_filename
        return [mapping]

    def _build_election_metadata_2000_general(self, election):
        # One entry per county, all extracted from the same zipped archive.
        meta_entries = []
        for county in self._counties():
            county_name = county['name']
            filename = self._standardized_filename(election,
                jurisdiction=county_name, reporting_level='precinct',
                extension='.txt')
            raw_extracted_filename = self._raw_extracted_filename_2000_general(county_name)
            meta_entries.append({
                'generated_filename': filename,
                'raw_url': election['direct_links'][0],
                'raw_extracted_filename': raw_extracted_filename,
                'ocd_id': county['ocd_id'],
                'name': county_name,
                'election': election['slug'],
            })
        return meta_entries

    def _build_election_metadata_zipped_special(self, election):
        # Entries come from a hand-maintained url_paths CSV for the election.
        meta_entries = []
        url_paths = self._url_paths_for_election(election['slug'])
        for path in url_paths:
            filename_kwargs = {
                'reporting_level': path['reporting_level'],
                'extension': '.txt',
                'office': path['office'],
                'office_district': path['district'],
            }
            if path['reporting_level'] == 'precinct':
                filename_kwargs['jurisdiction'] = path['jurisdiction']
                jurisdiction = path['jurisdiction']
                ocd_id = 'ocd-division/country:us/state:ar/county:{}'.format(ocd_type_id(jurisdiction))
            else:
                jurisdiction = 'Arkansas'
                ocd_id = 'ocd-division/country:us/state:ar'
            filename = self._standardized_filename(election, **filename_kwargs)
            meta_entries.append({
                'generated_filename': filename,
                'raw_url': path['url'],
                'raw_extracted_filename': path['raw_extracted_filename'],
                'ocd_id': ocd_id,
                'name': jurisdiction,
                'election': election['slug'],
            })
        return meta_entries

    def _raw_extracted_filename_2000_general(self, county_name):
        # Archive members look like "ctyASHLEYC.txt": "cty" + first 7 chars
        # of the upper-cased, space-stripped "<County> County" string.
        county_part = county_name + " County"
        county_part = county_part.upper().replace(' ', '')
        return "cty{}.txt".format(county_part[:7])

    def _build_election_metadata_clarity(self, election, fmt="xml"):
        """
        Return metadata entries for election results provided by the Clarity
        system.

        These results seem to be for elections starting in 2012.

        Keyword Arguments:

        * fmt - Format of results file. Can be "xls", "txt" or "xml".
          Default is "xml".
        """
        base_url = election['direct_links'][0]
        # NOTE(review): ``clarity`` is not imported anywhere in this module's
        # visible imports, so this line would raise NameError at runtime —
        # confirm the clarity-client import wasn't accidentally dropped.
        jurisdiction = clarity.Jurisdiction(url=base_url, level='state')
        return self._build_election_metadata_clarity_county(election, fmt, jurisdiction) +\
            self._build_election_metadata_clarity_precinct(election, fmt, jurisdiction)

    def _build_election_metadata_clarity_county(self, election, fmt, jurisdiction):
        # Single statewide entry pointing at the county-level detail report.
        return [{
            "generated_filename": self._standardized_filename(election,
                reporting_level='county', extension='.'+fmt),
            "raw_extracted_filename": "detail.{}".format(fmt),
            "raw_url": jurisdiction.report_url(fmt),
            "ocd_id": 'ocd-division/country:us/state:ar',
            "name": 'Arkansas',
            "election": election['slug']
        }]

    def _build_election_metadata_clarity_precinct(self, election, fmt, jurisdiction):
        # One entry per county subjurisdiction that publishes precinct detail.
        meta_entries = []
        for path in self._clarity_precinct_url_paths(election, fmt, jurisdiction):
            jurisdiction_name = path['jurisdiction']
            ocd_id = 'ocd-division/country:us/state:ar/county:{}'.format(ocd_type_id(jurisdiction_name))
            filename = self._standardized_filename(election,
                jurisdiction=jurisdiction_name, reporting_level='precinct',
                extension='.'+fmt)
            meta_entries.append({
                "generated_filename": filename,
                "raw_extracted_filename": "detail.{}".format(fmt),
                "raw_url": path['url'],
                "ocd_id": ocd_id,
                "name": jurisdiction_name,
                "election": election['slug'],
            })
        return meta_entries

    def _clarity_precinct_url_paths_filename(self, election):
        # Cache file for the scraped per-county report URLs.
        filename = self._standardized_filename(election, ['url_paths'],
            reporting_level='precinct', extension='.csv')
        return os.path.join(self.mappings_dir, filename)

    def _clarity_precinct_url_paths(self, election, fmt, jurisdiction):
        """Return per-county report URL rows, scraping and caching them on
        first use (skipping counties with no precinct-level results)."""
        url_paths_filename = self._clarity_precinct_url_paths_filename(election)
        if os.path.exists(url_paths_filename):
            return self._url_paths(url_paths_filename)

        url_paths = []
        for subjurisdiction in jurisdiction.get_subjurisdictions():
            if subjurisdiction.url not in self.no_precinct_urls:
                url_paths.append({
                    'date': election['start_date'],
                    'office': '',
                    'race_type': election['race_type'],
                    'party': '',
                    'special': election['special'],
                    'url': subjurisdiction.report_url(fmt),
                    'reporting_level': 'precinct',
                    'jurisdiction': subjurisdiction.name,
                })

        # Persist the scraped rows so subsequent runs skip the network.
        with open(url_paths_filename, 'wb') as f:
            fieldnames = ['date', 'office', 'race_type', 'party',
                'special', 'url', 'reporting_level', 'jurisdiction']
            writer = unicodecsv.DictWriter(f, fieldnames)
            writer.writeheader()
            writer.writerows(url_paths)

        return url_paths

    def _url_for_fetch(self, mapping):
        # Prefer the pre-processed (CSV) URL over the raw source when present.
        if 'pre_processed_url' in mapping:
            return mapping['pre_processed_url']
        else:
            return mapping['raw_url']
| mit | b5b6c5dd36c6041a0abb8251b74e46ff | 40.294118 | 104 | 0.588523 | 3.9 | false | false | false | false |
openelections/openelections-core | openelex/us/sd/datasource.py | 1 | 5593 | """
Standardize names of data files on the Secretary of State website.

NOTE(review): although this module lives under ``openelex/us/sd/``, every URL,
OCD ID and jurisdiction name in the code below targets West Virginia
(``sos.wv.gov``, ``ocd-division/country:us/state:wv``); it appears to have been
copied from the WV datasource. The previous docstring described South Dakota —
reconcile the module path with its implementation.

The state offers PDF files containing precinct-level results for statewide candidates (includes U.S. House) and state legislative candidates.

The CSV versions of those are contained in the https://github.com/openelections/openelections-data-sd repository.
"""
from future import standard_library
standard_library.install_aliases()
from builtins import zip
from os.path import join
import json
import unicodecsv
import urllib.parse
import requests
from bs4 import BeautifulSoup
from openelex import PROJECT_ROOT
from openelex.base.datasource import BaseDatasource
from openelex.lib import build_github_url
class Datasource(BaseDatasource):
    """Map raw results files to standardized filenames/metadata.

    NOTE(review): despite the module path (sd), every URL and OCD ID here
    targets West Virginia — see module docstring.
    """

    # PUBLIC INTERFACE
    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        # Fetch from the pre-processed URL when one exists.
        return [(item['generated_filename'], self._url_for_fetch(item))
                for item in self.mappings(year)]

    def unprocessed_filename_url_pairs(self, year=None):
        # For pre-processed (CSV) entries, also expose the original PDF.
        return [(item['generated_filename'].replace(".csv", ".pdf"), item['raw_url'])
                for item in self.mappings(year)
                if item['pre_processed_url']]

    # PRIVATE METHODS
    def _build_metadata(self, year, elections):
        """Build mapping entries for one year's elections.

        Pre-2008 elections come from per-office PDF paths listed in the
        url_paths fixture; 2008+ elections are scraped CSV links, one per
        county (the first link is statewide and is skipped via ``[1:]``).
        """
        meta = []
        year_int = int(year)
        # BUG FIX: previously compared the raw ``year`` key (``year < 2008``)
        # even though ``year_int`` had been computed; if the key is a string
        # that comparison raises TypeError on Python 3.
        if year_int < 2008:
            for election in elections:
                results = [x for x in self._url_paths()
                           if x['date'] == election['start_date']]
                for result in results:
                    generated_filename = self._generate_office_filename(
                        election['direct_links'][0], election['start_date'],
                        election['race_type'], result)
                    meta.append({
                        "generated_filename": generated_filename,
                        "raw_url": self._build_raw_url(year, result['path']),
                        "pre_processed_url": build_github_url(self.state, generated_filename),
                        "ocd_id": 'ocd-division/country:us/state:wv',
                        "name": 'West Virginia',
                        "election": election['slug']
                    })
        else:
            for election in elections:
                csv_links = self._find_csv_links(election['direct_links'][0])
                counties = self._jurisdictions()
                # First CSV link is the statewide file; pair the rest with
                # counties in order.
                results = list(zip(counties, csv_links[1:]))
                for result in results:
                    meta.append({
                        "generated_filename": self._generate_county_filename(result[0]['county'], election),
                        "pre_processed_url": None,
                        "raw_url": result[1],
                        "ocd_id": result[0]['ocd_id'],
                        "name": result[0]['county'],
                        "election": election['slug']
                    })
        return meta

    def _build_raw_url(self, year, path):
        """Build the sos.wv.gov document URL for a pre-2008 result file."""
        return "http://www.sos.wv.gov/elections/history/electionreturns/Documents/%s/%s" % (year, path)

    def _generate_statewide_filename(self, election):
        # e.g. 20081104__wv__general.csv (special elections get a prefix)
        election_type = election['race_type']
        if election['special']:
            election_type = 'special__' + election_type
        bits = [
            election['start_date'].replace('-', ''),
            self.state.lower(),
            election_type
        ]
        return "__".join(bits) + '.csv'

    def _generate_county_filename(self, county, election):
        # e.g. 20081104__wv__general__kanawha.csv
        bits = [
            election['start_date'].replace('-', ''),
            self.state.lower(),
            election['race_type'],
            county.lower()
        ]
        return "__".join(bits) + '.csv'

    def _generate_office_filename(self, url, start_date, election_type, result):
        # example: 20120508__wv__primary__wirt.csv
        if result['district'] == '':
            office = result['office']
        else:
            office = result['office'] + '__' + result['district']
        if result['special']:
            election_type = 'special__' + election_type
        bits = [
            start_date.replace('-', ''),
            self.state.lower(),
            election_type,
            office
        ]
        # (Removed an unused ``urllib.parse.urlparse(url).path`` local; the
        # url parameter is kept for interface compatibility.)
        name = "__".join(bits) + '.csv'
        return name

    def _find_csv_links(self, url):
        """Return CSV result links for elections 2008-present.

        First item is statewide, remainder are county-level.
        """
        r = requests.get(url)
        soup = BeautifulSoup(r.text, 'html.parser')
        return ['http://apps.sos.wv.gov/elections/results/' + x['href']
                for x in soup.find_all('a')
                if x.text == 'Download Comma Separated Values (CSV)']

    def _jurisdictions(self):
        """West Virginia counties"""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['county'] != ""]
        return mappings

    def _url_for_fetch(self, mapping):
        # Prefer the pre-processed (CSV) URL over the raw source when present.
        if mapping['pre_processed_url']:
            return mapping['pre_processed_url']
        else:
            return mapping['raw_url']
| mit | 283434198921a04ed901a1acd34d0795 | 39.23741 | 158 | 0.571607 | 4.076531 | false | false | false | false |
openelections/openelections-core | openelex/us/ga/datasource.py | 1 | 5614 | """
Stake in the ground for GA results
"""
from future import standard_library
standard_library.install_aliases()
from os.path import join
import json
import datetime
import urllib.parse
from openelex import PROJECT_ROOT
from openelex.lib import build_github_url
from openelex.base.datasource import BaseDatasource
class Datasource(BaseDatasource):
    """Map Georgia raw results files to standardized filenames/metadata."""

    # PUBLIC INTERFACE
    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings

    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]

    def filename_url_pairs(self, year=None):
        return [(item['generated_filename'], item['raw_url'])
                for item in self.mappings(year)]

    def mappings_for_url(self, url):
        """Return all mapping entries whose raw_url matches ``url``."""
        return [mapping for mapping in self.mappings() if mapping['raw_url'] == url]

    # PRIVATE METHODS
    def _build_metadata(self, year, elections):
        """Build mapping entries for one year's elections.

        Special elections get two statewide entries (generic and
        special-specific filenames); regular elections get one precinct-level
        entry per county.
        """
        meta = []
        for election in elections:
            # Match url_paths rows on date and special flag.
            is_special = bool(election['special'])
            results = [x for x in self._url_paths()
                       if x['date'] == election['start_date']
                       and x['special'] == is_special]
            for result in results:
                raw_url = result['url'] if result['url'] else None
                if result['special']:
                    ocd_id = 'ocd-division/country:us/state:ga'
                    generated_filename = self._generate_filename(election)
                    meta.append({
                        "generated_filename": generated_filename,
                        "raw_url": raw_url,
                        "pre_processed_url": build_github_url(self.state, generated_filename),
                        "ocd_id": ocd_id,
                        "name": 'Georgia',
                        "election": election['slug']
                    })
                    generated_filename = self._generate_special_filename(election, result)
                    meta.append({
                        "generated_filename": generated_filename,
                        "raw_url": raw_url,
                        "pre_processed_url": build_github_url(self.state, generated_filename),
                        "ocd_id": ocd_id,
                        "name": 'Georgia',
                        "election": election['slug']
                    })
                else:
                    for jurisdiction in self._jurisdictions():
                        county = jurisdiction['county']
                        generated_filename = self._generate_county_filename(
                            election, county, result)
                        # BUG FIX: the OCD ID was previously built from
                        # result['county'] (a url_paths row, which does not
                        # carry the county) instead of the jurisdiction being
                        # iterated — the same county used for the filename
                        # and the "name" field.
                        ocd_id = 'ocd-division/country:us/state:ga/county:%s' % county.lower().replace(" ", "_")
                        meta.append({
                            "generated_filename": generated_filename,
                            "raw_url": raw_url,
                            "pre_processed_url": build_github_url(self.state, generated_filename),
                            "ocd_id": ocd_id,
                            "name": county,
                            "election": election['slug']
                        })
        return meta

    def _generate_filename(self, election):
        """Statewide filename, e.g. 20140520__ga__primary.csv."""
        if election['special']:
            election_type = 'special__' + election['race_type'].replace("-", "__")
        else:
            election_type = election['race_type'].replace("-", "__")
        bits = [
            election['start_date'].replace('-', ''),
            self.state.lower(),
            election_type
        ]
        name = "__".join(bits) + '.csv'
        return name

    def _generate_county_filename(self, election, county, result):
        """Per-county precinct filename, e.g.
        20140520__ga__primary__fulton__precinct.csv."""
        if election['special']:
            election_type = 'special__' + election['race_type'].replace("-", "__")
        else:
            election_type = election['race_type'].replace("-", "__")
        bits = [
            election['start_date'].replace('-', ''),
            self.state.lower(),
        ]
        if result['party']:
            bits.append(result['party'].lower())
        bits.extend([
            election_type,
            county.replace(' ', '_').lower()
        ])
        bits.append('precinct')
        filename = "__".join(bits) + '.csv'
        return filename

    def _generate_special_filename(self, election, result):
        """Statewide special-election precinct filename."""
        bits = [
            election['start_date'].replace('-', ''),
            self.state.lower(),
        ]
        if result['party']:
            bits.append(result['party'].lower())
        bits.extend([
            'special__' + election['race_type'].replace("-", "__")
        ])
        bits.append('precinct')
        filename = "__".join(bits) + '.csv'
        return filename

    def _jurisdictions(self):
        """Georgia counties"""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['county'] != ""]
        return mappings
| mit | dc7e3f0aef375023ffc6022bfb475cad | 38.535211 | 122 | 0.508906 | 4.338485 | false | false | false | false |
viblo/pymunk | benchmarks/vec2d_baseclass.py | 1 | 3960 | """Test different ways to implement Vec2d.
Compares:
- Object and NamedTuple as base classes
- Ways to create a Vec2d.
"""
from typing import NamedTuple
import pymunk
print("pymunk.version", pymunk.version)

# Module-level fixtures shared by the benchmark functions below;
# populated by setup() before any bench_* function runs.
s = None
g = None
vec_obj = None
vec_ntuple = None
def setup():
    """Initialize the shared benchmark fixtures: a pymunk Space, its raw
    cffi gravity vector, and one vector of each implementation under test."""
    global s
    global g
    global vec_obj
    global vec_ntuple
    s = pymunk.Space()
    s.gravity = 123, 456
    # Raw cffi cpVect with .x/.y attributes, as returned by the C API.
    g = pymunk.cp.cpSpaceGetGravity(s._space)
    vec_obj = Vec2dObject(123, 456)
    vec_ntuple = Vec2dNamedTuple(123, 456)
# 1 Vec2d with object as baseclass
class Vec2dObject:
    """2d vector built on a plain object with ``__slots__``."""

    __slots__ = ("x", "y")

    x: float
    y: float

    @staticmethod
    def _fromcffi(p) -> "Vec2dObject":
        """Used as a speedy way to create Vec2ds internally in pymunk."""
        vec = Vec2dObject.__new__(Vec2dObject)
        vec.x = p.x
        vec.y = p.y
        return vec

    def __init__(self, x_or_pair, y=None):
        # Accept (x, y), another Vec2dObject, or any length-2 sequence.
        if y is not None:
            self.x = x_or_pair
            self.y = y
        elif isinstance(x_or_pair, Vec2dObject):
            self.x = x_or_pair.x
            self.y = x_or_pair.y
        else:
            assert (
                len(x_or_pair) == 2
            ), f"{x_or_pair} must be of length 2 when used alone"
            self.x = x_or_pair[0]
            self.y = x_or_pair[1]

    def __getitem__(self, i):
        if i == 0:
            return self.x
        if i == 1:
            return self.y
        raise IndexError()

    def __iter__(self):
        yield self.x
        yield self.y

    def __len__(self) -> int:
        return 2

    # String representaion (for debugging)
    def __repr__(self) -> str:
        return "Vec2dObject(%s, %s)" % (self.x, self.y)

    # Comparison
    def __eq__(self, other) -> bool:
        if hasattr(other, "__getitem__") and len(other) == 2:
            return self.x == other[0] and self.y == other[1]
        return False

    def __ne__(self, other) -> bool:
        if hasattr(other, "__getitem__") and len(other) == 2:
            return self.x != other[0] or self.y != other[1]
        return True
class Vec2dNamedTuple(NamedTuple):
    """2d vector built on typing.NamedTuple (already a real tuple)."""

    x: float
    y: float

    @staticmethod
    def _fromcffi(p) -> "Vec2dNamedTuple":
        """Used as a speedy way to create Vec2ds internally in pymunk."""
        return Vec2dNamedTuple.__new__(Vec2dNamedTuple, p.x, p.y)

    @staticmethod
    def _fromcffi2(p) -> "Vec2dNamedTuple":
        """Used as a speedy way to create Vec2ds internally in pymunk."""
        px, py = p.x, p.y
        return Vec2dNamedTuple(px, py)
# Benchmarks
def bench_creation_constructor():
    # Construct via the normal constructor with separate x/y args.
    g2 = g
    gr = Vec2dNamedTuple(g2.x, g2.y)


# not supported:
# def bench_creation_constructor_unpack():
#     gr = Vec2dNamedTuple(*g)


def bench_creation_fromcffi():
    # Construct via _fromcffi (calls tuple __new__ directly).
    gr = Vec2dNamedTuple._fromcffi(g)


def bench_creation_fromcffi2():
    # Construct via _fromcffi2 (normal constructor call).
    gr = Vec2dNamedTuple._fromcffi2(g)


def bench_creation_usingnew():
    # Construct by calling __new__ directly at the call site.
    gr = Vec2dNamedTuple.__new__(Vec2dNamedTuple, g.x, g.y)


def bench_set_vec_obj():
    # Object-based vector must be wrapped in tuple() before the cffi call.
    pymunk.cp.cpSpaceSetGravity(s._space, tuple(vec_obj))


def bench_set_vec_ntuple_wrapped():
    # NamedTuple wrapped in tuple() for comparison with the unwrapped case.
    pymunk.cp.cpSpaceSetGravity(s._space, tuple(vec_ntuple))


def bench_set_vec_ntuple():
    # NamedTuple passed straight through (it already is a tuple).
    assert len(vec_ntuple) == 2
    pymunk.cp.cpSpaceSetGravity(s._space, vec_ntuple)
def run_bench(func):
    """Time the named benchmark with timeit.repeat and print sorted timings.

    ``func`` is the benchmark's name as a string; timeit re-imports it from
    __main__. NOTE(review): relies on ``timeit`` being imported by the
    __main__ block below before this is called.
    """
    print(f"Running {func}")
    print(
        sorted(
            timeit.repeat(
                f"{func}()",
                setup=f"from __main__ import {func}",
            )
        )
    )
if __name__ == "__main__":
    import timeit

    # Initialize shared fixtures, then run every benchmark in sequence.
    print(f"Benchmark: Compare ways to construct Vec2ds")
    setup()
    run_bench("bench_creation_constructor")
    run_bench("bench_creation_fromcffi")
    run_bench("bench_creation_fromcffi2")
    run_bench("bench_creation_usingnew")
    run_bench("bench_set_vec_obj")
    run_bench("bench_set_vec_ntuple_wrapped")
    run_bench("bench_set_vec_ntuple")
| mit | df290c5bcdb8928846caca36e3ed4131 | 22.431953 | 73 | 0.570707 | 3.209076 | false | false | false | false |
viblo/pymunk | pymunk/examples/platformer.py | 1 | 10982 | """Showcase of a very basic 2d platformer
The red girl sprite is taken from Sithjester's RMXP Resources:
http://untamed.wild-refuge.net/rmxpresources.php?characters
.. note:: The code of this example is a bit messy. If you adapt this to your
own code you might want to structure it a bit differently.
"""
__docformat__ = "reStructuredText"
import math
import sys
import pygame
import pymunk
import pymunk.pygame_util
from pymunk.vec2d import Vec2d
def cpfclamp(f, min_, max_):
    """Clamp f between min and max"""
    bounded_below = max(f, min_)
    return min(bounded_below, max_)


def cpflerpconst(f1, f2, d):
    """Linearly interpolate from f1 to f2 by no more than d."""
    step = cpfclamp(f2 - f1, -d, d)
    return f1 + step
# Screen and simulation timing.
width, height = 690, 400
fps = 60
dt = 1.0 / fps

# Horizontal movement tuning: target speed and how fast it is reached on the
# ground vs. in the air.
PLAYER_VELOCITY = 100.0 * 2.0
PLAYER_GROUND_ACCEL_TIME = 0.05
PLAYER_GROUND_ACCEL = PLAYER_VELOCITY / PLAYER_GROUND_ACCEL_TIME

PLAYER_AIR_ACCEL_TIME = 0.25
PLAYER_AIR_ACCEL = PLAYER_VELOCITY / PLAYER_AIR_ACCEL_TIME

# Jump/fall tuning (pixels and pixels/second).
JUMP_HEIGHT = 16.0 * 3
JUMP_BOOST_HEIGHT = 24.0
JUMP_CUTOFF_VELOCITY = 100
FALL_VELOCITY = 250.0

JUMP_LENIENCY = 0.05

HEAD_FRICTION = 0.7

PLATFORM_SPEED = 1
def main():
    """Run the platformer demo: build the level, then loop over input,
    physics stepping and drawing until the player quits."""
    ### PyGame init
    pygame.init()
    screen = pygame.display.set_mode((width, height))
    clock = pygame.time.Clock()
    running = True
    font = pygame.font.SysFont("Arial", 16)
    sound = pygame.mixer.Sound("sfx.wav")
    img = pygame.image.load("xmasgirl1.png")

    ### Physics stuff
    space = pymunk.Space()
    space.gravity = Vec2d(0, -1000)
    pymunk.pygame_util.positive_y_is_up = True
    draw_options = pymunk.pygame_util.DrawOptions(screen)

    # box walls
    static = [
        pymunk.Segment(space.static_body, (10, 50), (300, 50), 3),
        pymunk.Segment(space.static_body, (300, 50), (325, 50), 3),
        pymunk.Segment(space.static_body, (325, 50), (350, 50), 3),
        pymunk.Segment(space.static_body, (350, 50), (375, 50), 3),
        pymunk.Segment(space.static_body, (375, 50), (680, 50), 3),
        pymunk.Segment(space.static_body, (680, 50), (680, 370), 3),
        pymunk.Segment(space.static_body, (680, 370), (10, 370), 3),
        pymunk.Segment(space.static_body, (10, 370), (10, 50), 3),
    ]
    static[1].color = pygame.Color("red")
    static[2].color = pygame.Color("green")
    static[3].color = pygame.Color("red")

    # rounded shape
    rounded = [
        pymunk.Segment(space.static_body, (500, 50), (520, 60), 3),
        pymunk.Segment(space.static_body, (520, 60), (540, 80), 3),
        pymunk.Segment(space.static_body, (540, 80), (550, 100), 3),
        pymunk.Segment(space.static_body, (550, 100), (550, 150), 3),
    ]

    # static platforms
    platforms = [
        pymunk.Segment(space.static_body, (170, 50), (270, 150), 3)
        # , pymunk.Segment(space.static_body, (270, 100), (300, 100), 5)
        ,
        pymunk.Segment(space.static_body, (400, 150), (450, 150), 3),
        pymunk.Segment(space.static_body, (400, 200), (450, 200), 3),
        pymunk.Segment(space.static_body, (220, 200), (300, 200), 3),
        pymunk.Segment(space.static_body, (50, 250), (200, 250), 3),
        pymunk.Segment(space.static_body, (10, 370), (50, 250), 3),
    ]

    for s in static + platforms + rounded:
        s.friction = 1.0
        s.group = 1
    space.add(*static, *platforms, *rounded)

    # moving platform: a kinematic segment that patrols platform_path.
    platform_path = [(650, 100), (600, 200), (650, 300)]
    platform_path_index = 0
    platform_body = pymunk.Body(body_type=pymunk.Body.KINEMATIC)
    platform_body.position = 650, 100
    s = pymunk.Segment(platform_body, (-25, 0), (25, 0), 5)
    s.friction = 1.0
    s.group = 1
    s.color = pygame.Color("blue")
    space.add(platform_body, s)

    # pass through platform: collides only when the player falls onto it.
    passthrough = pymunk.Segment(space.static_body, (270, 100), (320, 100), 5)
    passthrough.color = pygame.Color("yellow")
    passthrough.friction = 1.0
    passthrough.collision_type = 2
    passthrough.filter = pymunk.ShapeFilter(categories=0b1000)
    space.add(passthrough)

    def passthrough_handler(arbiter, space, data):
        # Only collide while the player is moving downward.
        if arbiter.shapes[0].body.velocity.y < 0:
            return True
        else:
            return False

    space.add_collision_handler(1, 2).begin = passthrough_handler

    # player: one dynamic body with three circles (feet + two head circles).
    body = pymunk.Body(5, float("inf"))
    body.position = 100, 100

    head = pymunk.Circle(body, 10, (0, 5))
    head2 = pymunk.Circle(body, 10, (0, 13))
    feet = pymunk.Circle(body, 10, (0, -5))
    # Since we use the debug draw we need to hide these circles. To make it
    # easy we just set their color to black.
    feet.color = 0, 0, 0, 0
    head.color = 0, 0, 0, 0
    head2.color = 0, 0, 0, 0
    mask = pymunk.ShapeFilter.ALL_MASKS() ^ passthrough.filter.categories
    sf = pymunk.ShapeFilter(mask=mask)
    head.filter = sf
    head2.filter = sf
    feet.collision_type = 1
    feet.ignore_draw = head.ignore_draw = head2.ignore_draw = True
    space.add(body, head, feet, head2)

    direction = 1
    remaining_jumps = 2
    landing = {"p": Vec2d.zero(), "n": 0}

    frame_number = 0

    landed_previous = False

    while running:
        grounding = {
            "normal": Vec2d.zero(),
            "penetration": Vec2d.zero(),
            "impulse": Vec2d.zero(),
            "position": Vec2d.zero(),
            "body": None,
        }

        # find out if player is standing on ground
        def f(arbiter):
            n = -arbiter.contact_point_set.normal
            if n.y > grounding["normal"].y:
                grounding["normal"] = n
                grounding["penetration"] = -arbiter.contact_point_set.points[0].distance
                grounding["body"] = arbiter.shapes[1].body
                grounding["impulse"] = arbiter.total_impulse
                grounding["position"] = arbiter.contact_point_set.points[0].point_b

        body.each_arbiter(f)

        # "well grounded" = standing on a surface flat enough to walk on.
        well_grounded = False
        if (
            grounding["body"] != None
            and abs(grounding["normal"].x / grounding["normal"].y) < feet.friction
        ):
            well_grounded = True
            remaining_jumps = 2

        ground_velocity = Vec2d.zero()
        if well_grounded:
            ground_velocity = grounding["body"].velocity

        for event in pygame.event.get():
            if (
                event.type == pygame.QUIT
                or event.type == pygame.KEYDOWN
                and (event.key in [pygame.K_ESCAPE, pygame.K_q])
            ):
                running = False
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_p:
                pygame.image.save(screen, "platformer.png")
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
                # Jump (or double jump) with an impulse relative to the
                # velocity of whatever the player stands on.
                if well_grounded or remaining_jumps > 0:
                    jump_v = math.sqrt(2.0 * JUMP_HEIGHT * abs(space.gravity.y))
                    impulse = (0, body.mass * (ground_velocity.y + jump_v))
                    body.apply_impulse_at_local_point(impulse)
                    remaining_jumps -= 1
            elif event.type == pygame.KEYUP and event.key == pygame.K_UP:
                # Releasing jump early caps the upward velocity.
                body.velocity = body.velocity.x, min(
                    body.velocity.y, JUMP_CUTOFF_VELOCITY
                )

        # Target horizontal velocity of player
        target_vx = 0

        if body.velocity.x > 0.01:
            direction = 1
        elif body.velocity.x < -0.01:
            direction = -1

        keys = pygame.key.get_pressed()
        if keys[pygame.K_LEFT]:
            direction = -1
            target_vx -= PLAYER_VELOCITY
        if keys[pygame.K_RIGHT]:
            direction = 1
            target_vx += PLAYER_VELOCITY
        if keys[pygame.K_DOWN]:
            direction = -3

        # Ground movement is driven through the feet's surface velocity.
        feet.surface_velocity = -target_vx, 0

        if grounding["body"] != None:
            feet.friction = -PLAYER_GROUND_ACCEL / space.gravity.y
            head.friction = HEAD_FRICTION
        else:
            feet.friction, head.friction = 0, 0

        # Air control
        if grounding["body"] == None:
            body.velocity = Vec2d(
                cpflerpconst(
                    body.velocity.x,
                    target_vx + ground_velocity.x,
                    PLAYER_AIR_ACCEL * dt,
                ),
                body.velocity.y,
            )

        body.velocity = body.velocity.x, max(
            body.velocity.y, -FALL_VELOCITY
        )  # clamp upwards as well?

        # Move the moving platform
        destination = platform_path[platform_path_index]
        current = Vec2d(*platform_body.position)
        distance = current.get_distance(destination)
        if distance < PLATFORM_SPEED:
            # Reached a waypoint: advance (and wrap) to the next one.
            platform_path_index += 1
            platform_path_index = platform_path_index % len(platform_path)
            t = 1
        else:
            t = PLATFORM_SPEED / distance

        new = current.interpolate_to(destination, t)
        platform_body.position = new
        platform_body.velocity = (new - current) / dt

        ### Clear screen
        screen.fill(pygame.Color("black"))

        ### Helper lines
        for y in [50, 100, 150, 200, 250, 300]:
            color = pygame.Color("green")
            pygame.draw.line(screen, color, (10, y), (680, y), 1)

        ### Draw stuff
        space.debug_draw(draw_options)

        # Pick the sprite-sheet cell from facing direction and walk cycle.
        direction_offset = 48 + (1 * direction + 1) // 2 * 48
        if grounding["body"] != None and abs(target_vx) > 1:
            animation_offset = 32 * (frame_number // 8 % 4)
        elif grounding["body"] is None:
            animation_offset = 32 * 1
        else:
            animation_offset = 32 * 0
        position = body.position + (-16, 28)
        p = pymunk.pygame_util.to_pygame(position, screen)
        screen.blit(img, p, (animation_offset, direction_offset, 32, 48))

        # Did we land?
        if abs(grounding["impulse"].y) / body.mass > 200 and not landed_previous:
            sound.play()
            landing = {"p": grounding["position"], "n": 5}
            landed_previous = True
        else:
            landed_previous = False
        if landing["n"] > 0:
            # Show a landing marker for a few frames.
            p = pymunk.pygame_util.to_pygame(landing["p"], screen)
            pygame.draw.circle(screen, pygame.Color("yellow"), p, 5)
            landing["n"] -= 1

        # Info and flip screen
        screen.blit(
            font.render("fps: " + str(clock.get_fps()), 1, pygame.Color("white")),
            (0, 0),
        )
        screen.blit(
            font.render(
                "Move with Left/Right, jump with Up, press again to double jump",
                1,
                pygame.Color("darkgrey"),
            ),
            (5, height - 35),
        )
        screen.blit(
            font.render("Press ESC or Q to quit", 1, pygame.Color("darkgrey")),
            (5, height - 20),
        )
        pygame.display.flip()

        frame_number += 1

        ### Update physics
        space.step(dt)

        clock.tick(fps)
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code.
    sys.exit(main())
| mit | 5f2447a202f1d3911777364cc80f9668 | 31.39528 | 88 | 0.562739 | 3.361494 | false | false | false | false |
openelections/openelections-core | openelex/us/md/validate/election.py | 1 | 21387 | from __future__ import print_function
from builtins import object
import os
import unicodecsv
from openelex.models import Contest, Candidate, Result
from openelex.us.md import jurisdiction
from functools import reduce
# Classes that describe election attributes
class MDElection(object):
"""
Base class for describing Maryland elections.
Subclasses, should, at the very least, define ``election_id`` and
``candidate_counts`` attributes.
It will also likely be useful to define ``num_{{reporting_level}}_results``
attributes that contain the known number of results for a particular
reporting level.
"""
election_id = None
"""
Identifier for election.
This should match the ID from the OpenElections metadata API.
"""
candidate_counts = {}
"""
Map of contest slugs to known number of candidates.
"""
reporting_levels = []
"""
Iterable of available reporting levels of results in this election.
"""
counties = jurisdiction.counties
congressional_districts = jurisdiction.congressional_districts
state_senate_districts = jurisdiction.state_senate_districts
state_legislative_districts = jurisdiction.state_legislative_districts
state_senate_district_to_county = jurisdiction.state_senate_district_to_county
state_legislative_district_to_county = jurisdiction.state_legislative_district_to_county
@property
def contests(self):
"""
Return a list of contest slugs.
"""
return list(self.candidate_counts.keys())
def candidate_counts_filename(self):
bits = self.election_id.split('-')
tpl ="candidate_counts__{year}{month}{day}__{state}__{election_type}.csv"
return tpl.format(year=bits[1], month=bits[2], day=bits[3],
state=bits[0], election_type=bits[4])
def load_candidate_counts(self, skip_zero=True):
"""
Load candidate counts from a CSV fixture
Args:
skip_zero: Should contests with zero candidates be ignored? Default is True.
"""
pwd = os.path.abspath(os.path.dirname(__file__))
filename = os.path.join(pwd, 'fixtures', self.candidate_counts_filename())
with open(filename, 'rU') as csvfile:
self.candidate_counts = {}
reader = unicodecsv.DictReader(csvfile)
for row in reader:
count = int(row['count'].strip())
contest = row['contest'].strip()
if count == 0 and skip_zero:
continue
self.candidate_counts[contest] = count
def get_party_contests(self, contest):
contests = []
if self.race_type == 'primary' and self.primary_type == 'closed':
for party in ('d', 'r'):
contests.append('{0}-{1}'.format(contest, party))
else:
contests.append(contest)
return contests
def _get_candidate_count(self, base_contest):
count = 0
for contest in self.get_party_contests(base_contest):
try:
count += self.candidate_counts[contest]
except KeyError:
print("WARN: no candidate count for contest '{0}'".format(
contest))
return count
def _get_num_district_results(self, contest_slug, districts,
district_to_county=None):
num_results = 0
contest_tpl = contest_slug + '-{0}'
for district in districts:
base_contest = contest_tpl.format(district.lower())
for contest in self.get_party_contests(base_contest):
try:
num_candidates = self.candidate_counts[contest]
if district_to_county:
num_results += len(district_to_county[district]) * num_candidates
else:
num_results += num_candidates
except KeyError:
pass
return num_results
# Generic validation helpers
def validate_contests(self):
    """
    Check the DB holds exactly the expected contests for this election.

    Raises AssertionError on a count mismatch, or Contest.DoesNotExist
    naming the first expected slug that is missing.
    """
    expected_slugs = self.contests
    found = Contest.objects.filter(election_id=self.election_id)
    num_expected = len(expected_slugs)
    num_found = found.count()
    assert num_found == num_expected, (
        "There should be %d contests, but there are %d" %
        (num_expected, num_found))
    for slug in expected_slugs:
        try:
            found.get(slug=slug)
        except Contest.DoesNotExist:
            raise Contest.DoesNotExist("No contest with slug '%s' found" %
                                       slug)
def validate_candidate_count(self):
    """
    Check each contest has exactly the expected number of candidates.

    Raises AssertionError on the first contest whose stored candidate
    count differs from the expected one.
    """
    election_candidates = Candidate.objects.filter(election_id=self.election_id)
    for contest_slug, expected_count in self.candidate_counts.items():
        actual = election_candidates.filter(contest_slug=contest_slug).count()
        assert actual == expected_count, ("There should be %d candidates "
                                          "for the contest '%s', but there are %d" %
                                          (expected_count, contest_slug, actual))
def validate_result_count(self, reporting_levels=None):
    """
    Validate the stored result count at each reporting level.

    Args:
        reporting_levels: Iterable of reporting-level names to check.
            Defaults to this election's ``reporting_levels`` attribute.

    Raises:
        AssertionError: If any level's result count doesn't match; every
            level is checked before failing so all mismatches get printed.
    """
    # BUG FIX: was `reporting_levels == None`; identity comparison with
    # `is None` is the correct idiom (PEP 8) and avoids invoking __eq__.
    if reporting_levels is None:
        reporting_levels = self.reporting_levels
    failed_levels = []
    for level in reporting_levels:
        try:
            self._validate_result_count_for_reporting_level(level)
        except AssertionError as e:
            # Collect every failing level instead of stopping at the first.
            print(e)
            failed_levels.append(level)
    assert len(failed_levels) == 0, ("Result count does not match the expected "
        "value for these levels: {0}".format(", ".join(failed_levels)))
def _validate_result_count_for_reporting_level(self, level):
    """
    Assert the stored result count for one reporting level matches the
    election's ``num_<level>_results`` attribute.
    """
    expected = getattr(self, 'num_%s_results' % level)
    count = Result.objects.filter(election_id=self.election_id,
                                  reporting_level=level).count()
    assert count == expected, ("Expected %d results for reporting level %s. "
                               "Instead there are %d" % (expected, level, count))
class CountyCongressResultsMixin(object):
    """Mixin: expected county-level result count for U.S. House contests."""
    @property
    def num_county_results_congress(self):
        """Each district's candidates counted once per county it spans."""
        districts = self.congressional_districts
        county_map = self.congressional_district_to_county
        return self._get_num_district_results('us-house-of-representatives',
                                              districts, county_map)
class CountyStateSenateResultsMixin(object):
    """Mixin: expected county-level result count for state senate contests."""
    @property
    def num_county_results_state_senate(self):
        """Each district's candidates counted once per county it spans."""
        districts = self.state_senate_districts
        county_map = self.state_senate_district_to_county
        return self._get_num_district_results('state-senate',
                                              districts, county_map)
class CountyStateLegislatureResultsMixin(object):
    """Mixin: expected county-level result count for house-of-delegates contests."""
    @property
    def num_county_results_state_legislature(self):
        """Each district's candidates counted once per county it spans."""
        districts = self.state_legislative_districts
        county_map = self.state_legislative_district_to_county
        return self._get_num_district_results('house-of-delegates',
                                              districts, county_map)
class StateLegislativeResultsMixin(object):
    """Mixin: expected number of state-legislative-district-level results."""
    @property
    def num_state_legislative_results(self):
        """Every candidate is reported once per state legislative district."""
        # BUG FIX: this used a bare `reduce(lambda x, y: x + y, ...)`, which
        # is a NameError under Python 3 unless functools.reduce is imported
        # (the surrounding code is Python 3 style). The builtin sum() gives
        # the same total and also returns 0 for an empty counts dict instead
        # of raising.
        total_candidates = sum(self.candidate_counts.values())
        return total_candidates * len(self.state_legislative_districts)
class Election2000(CountyCongressResultsMixin, MDElection):
    """Shared expectations for Maryland's 2000 elections (pre-2002 districts)."""
    congressional_district_to_county = jurisdiction.congressional_district_to_county_pre_2002

    @property
    def num_county_results(self):
        """President and U.S. Senate in every county, plus U.S. House."""
        num_counties = len(self.counties)
        statewide = (self._get_candidate_count('president') +
                     self._get_candidate_count('us-senate'))
        return statewide * num_counties + self.num_county_results_congress
class Election2000Primary(Election2000):
    """Maryland 2000 closed primary."""
    election_id = 'md-2000-03-07-primary'
    race_type = 'primary'
    primary_type = 'closed'
    reporting_levels = ['county', 'congressional_district']
    # Candidates per party-qualified contest slug.
    candidate_counts = {
        # 4 candidates, including "Uncommitted To Any Presidential Candidate"
        'president-d': 4,
        'president-r': 6,
        'us-senate-d': 3,
        'us-senate-r': 8,
        'us-house-of-representatives-1-d': 4,
        'us-house-of-representatives-1-r': 1,
        'us-house-of-representatives-2-d': 4,
        'us-house-of-representatives-2-r': 1,
        'us-house-of-representatives-3-d': 1,
        'us-house-of-representatives-3-r': 1,
        'us-house-of-representatives-4-d': 2,
        'us-house-of-representatives-4-r': 1,
        'us-house-of-representatives-5-d': 2,
        'us-house-of-representatives-5-r': 1,
        'us-house-of-representatives-6-d': 4,
        'us-house-of-representatives-6-r': 2,
        'us-house-of-representatives-7-d': 1,
        'us-house-of-representatives-7-r': 2,
        'us-house-of-representatives-8-d': 5,
        'us-house-of-representatives-8-r': 1,
    }

    @property
    def num_congressional_district_results(self):
        """President results in every CD plus per-district U.S. House results."""
        districts = self.congressional_districts
        pres_results = self._get_candidate_count('president') * len(districts)
        house_results = self._get_num_district_results(
            'us-house-of-representatives', districts)
        return pres_results + house_results
class Election2000General(StateLegislativeResultsMixin, Election2000):
    """Maryland 2000 general election; counts declared inline."""
    election_id = 'md-2000-11-07-general'
    race_type = 'general'
    # Candidates per contest slug (no party suffix in a general election).
    candidate_counts = {
        'president': 21,
        'us-senate': 4,
        'us-house-of-representatives-1': 4,
        'us-house-of-representatives-2': 3,
        'us-house-of-representatives-3': 4,
        'us-house-of-representatives-4': 4,
        'us-house-of-representatives-5': 3,
        'us-house-of-representatives-6': 4,
        'us-house-of-representatives-7': 3,
        'us-house-of-representatives-8': 6,
    }
class Election2002(CountyCongressResultsMixin, CountyStateSenateResultsMixin,
                   CountyStateLegislatureResultsMixin, MDElection):
    """Shared expectations for Maryland's 2002 elections (2002 district map)."""
    # BUG FIX: this attribute was named `reporting_level` (singular), which
    # nothing reads — validate_result_count() expects `reporting_levels`,
    # and every sibling class spells it that way.
    reporting_levels = ['county']
    # Old misspelled name kept so any external reader still works.
    reporting_level = reporting_levels
    congressional_district_to_county = jurisdiction.congressional_district_to_county_2002

    def __init__(self):
        # Candidate counts come from a CSV fixture rather than an inline dict.
        self.load_candidate_counts()

    @property
    def num_county_results(self):
        """Statewide offices in every county, plus each district-based office
        (U.S. House, state senate, house of delegates)."""
        num_counties = len(self.counties)
        statewide = (self._get_candidate_count('governor') +
                     self._get_candidate_count('comptroller') +
                     self._get_candidate_count('attorney-general'))
        return (statewide * num_counties +
                self.num_county_results_congress +
                self.num_county_results_state_senate +
                self.num_county_results_state_legislature)
class Election2002Primary(Election2002):
    """Maryland 2002 closed primary; counts loaded from the CSV fixture."""
    election_id = 'md-2002-09-10-primary'
    race_type = 'primary'
    primary_type = 'closed'
class Election2002General(Election2002):
    """Maryland 2002 general election; counts loaded from the CSV fixture."""
    election_id = 'md-2002-11-05-general'
    race_type = 'general'
    # NOTE: the original redefined __init__ with a body identical to
    # Election2002.__init__ (just `self.load_candidate_counts()`); the
    # inherited constructor already does this, so the duplicate was removed.
class Election2004(StateLegislativeResultsMixin, CountyCongressResultsMixin,
                   MDElection):
    """Shared expectations for Maryland's 2004 elections (2002 district map)."""
    reporting_levels = ['county', 'precinct', 'state_legislative']
    congressional_district_to_county = jurisdiction.congressional_district_to_county_2002

    @property
    def num_county_results(self):
        """President and U.S. Senate in every county, plus U.S. House."""
        num_counties = len(self.counties)
        statewide = (self._get_candidate_count('president') +
                     self._get_candidate_count('us-senate'))
        return statewide * num_counties + self.num_county_results_congress
class Election2004Primary(Election2004):
    """Maryland 2004 closed primary; counts declared inline."""
    election_id = 'md-2004-03-02-primary'
    race_type = 'primary'
    primary_type = 'closed'
    # Candidates per party-qualified contest slug.
    candidate_counts = {
        'president-d': 12,
        'president-r': 1,
        'us-senate-d': 3,
        'us-senate-r': 9,
        'us-house-of-representatives-1-d': 4,
        'us-house-of-representatives-1-r': 2,
        'us-house-of-representatives-2-d': 1,
        'us-house-of-representatives-2-r': 3,
        'us-house-of-representatives-3-d': 2,
        'us-house-of-representatives-3-r': 3,
        'us-house-of-representatives-4-d': 2,
        'us-house-of-representatives-4-r': 6,
        'us-house-of-representatives-5-d': 1,
        'us-house-of-representatives-5-r': 3,
        'us-house-of-representatives-6-d': 7,
        'us-house-of-representatives-6-r': 2,
        'us-house-of-representatives-7-d': 2,
        'us-house-of-representatives-7-r': 3,
        'us-house-of-representatives-8-d': 3,
        'us-house-of-representatives-8-r': 3,
    }
class Election2004General(Election2004):
    """Maryland 2004 general election; counts declared inline."""
    election_id = 'md-2004-11-02-general'
    race_type = 'general'
    # Candidates per contest slug (no party suffix in a general election).
    candidate_counts = {
        'president': 12,
        'us-senate': 8,
        'us-house-of-representatives-1': 3,
        'us-house-of-representatives-2': 4,
        'us-house-of-representatives-3': 4,
        'us-house-of-representatives-4': 5,
        'us-house-of-representatives-5': 5,
        'us-house-of-representatives-6': 4,
        'us-house-of-representatives-7': 4,
        'us-house-of-representatives-8': 4,
    }
class Election2006(StateLegislativeResultsMixin, CountyCongressResultsMixin,
                   CountyStateSenateResultsMixin, CountyStateLegislatureResultsMixin,
                   MDElection):
    """Shared expectations for Maryland's 2006 elections (2002 district map)."""
    reporting_levels = ['county', 'precinct', 'state_legislative']
    congressional_district_to_county = jurisdiction.congressional_district_to_county_2002

    def __init__(self):
        # Candidate counts come from a CSV fixture rather than an inline dict.
        self.load_candidate_counts()

    @property
    def num_county_results(self):
        """Statewide offices in every county, plus each district-based office
        (U.S. House, state senate, house of delegates)."""
        num_counties = len(self.counties)
        statewide = (self._get_candidate_count('governor') +
                     self._get_candidate_count('comptroller') +
                     self._get_candidate_count('attorney-general') +
                     self._get_candidate_count('us-senate'))
        return (statewide * num_counties +
                self.num_county_results_congress +
                self.num_county_results_state_senate +
                self.num_county_results_state_legislature)
class Election2006Primary(Election2006):
    """Maryland 2006 closed primary; counts loaded from the CSV fixture."""
    election_id = 'md-2006-09-12-primary'
    race_type = 'primary'
    primary_type = 'closed'
class Election2006General(Election2006):
    """Maryland 2006 general election; counts loaded from the CSV fixture."""
    election_id = 'md-2006-11-07-general'
    race_type = 'general'
class Election2008(CountyCongressResultsMixin, StateLegislativeResultsMixin, MDElection):
    """Shared expectations for Maryland's 2008 elections (2002 district map)."""
    reporting_levels = ['county', 'precinct', 'state_legislative']
    congressional_district_to_county = jurisdiction.congressional_district_to_county_2002

    @property
    def num_county_results(self):
        """President in every county, plus per-county U.S. House results."""
        pres_per_county = self._get_candidate_count('president')
        return (pres_per_county * len(self.counties) +
                self.num_county_results_congress)
class Election2008Primary(Election2008):
    """Maryland 2008 closed primary; counts declared inline."""
    election_id = 'md-2008-02-12-primary'
    race_type = 'primary'
    primary_type = 'closed'
    # Candidates per party-qualified contest slug.
    candidate_counts = {
        'president-d': 9,
        'president-r': 9,
        'us-house-of-representatives-1-d': 4,
        'us-house-of-representatives-1-r': 5,
        'us-house-of-representatives-2-d': 1,
        'us-house-of-representatives-2-r': 1,
        'us-house-of-representatives-3-d': 2,
        'us-house-of-representatives-3-r': 4,
        'us-house-of-representatives-4-d': 6,
        'us-house-of-representatives-4-r': 4,
        'us-house-of-representatives-5-d': 2,
        'us-house-of-representatives-5-r': 3,
        'us-house-of-representatives-6-d': 5,
        'us-house-of-representatives-6-r': 5,
        'us-house-of-representatives-7-d': 2,
        'us-house-of-representatives-7-r': 2,
        'us-house-of-representatives-8-d': 3,
        'us-house-of-representatives-8-r': 5,
    }
class Election2008Special(CountyCongressResultsMixin, MDElection):
    """Special general election for the 4th District U.S. House seat."""
    election_id = 'md-2008-06-17-special-general'
    race_type = 'general'
    reporting_levels = ['county',]
    congressional_district_to_county = jurisdiction.congressional_district_to_county_2002
    # Only one contest was on the ballot.
    candidate_counts = {
        'us-house-of-representatives-4': 6,
    }
    @property
    def num_county_results(self):
        """Only the congressional contest appears at the county level."""
        return self.num_county_results_congress
class Election2008General(Election2008):
    """Maryland 2008 general election; counts declared inline."""
    election_id = 'md-2008-11-04-general'
    race_type = 'general'
    # Candidates per contest slug (no party suffix in a general election).
    candidate_counts = {
        'president': 24,
        'us-house-of-representatives-1': 4,
        'us-house-of-representatives-2': 4,
        'us-house-of-representatives-3': 3,
        'us-house-of-representatives-4': 7,
        'us-house-of-representatives-5': 4,
        'us-house-of-representatives-6': 4,
        'us-house-of-representatives-7': 6,
        'us-house-of-representatives-8': 7,
    }
class Election2010(StateLegislativeResultsMixin, CountyCongressResultsMixin,
                   CountyStateSenateResultsMixin, CountyStateLegislatureResultsMixin,
                   MDElection):
    """Shared expectations for Maryland's 2010 elections (2002 district map)."""
    reporting_levels = ['county', 'precinct', 'state_legislative']
    congressional_district_to_county = jurisdiction.congressional_district_to_county_2002

    def __init__(self):
        # Candidate counts come from a CSV fixture rather than an inline dict.
        self.load_candidate_counts()

    @property
    def num_county_results(self):
        """Statewide offices in every county, plus each district-based office
        (U.S. House, state senate, house of delegates)."""
        num_counties = len(self.counties)
        statewide = (self._get_candidate_count('governor') +
                     self._get_candidate_count('comptroller') +
                     self._get_candidate_count('attorney-general') +
                     self._get_candidate_count('us-senate'))
        return (statewide * num_counties +
                self.num_county_results_congress +
                self.num_county_results_state_senate +
                self.num_county_results_state_legislature)
class Election2010Primary(Election2010):
    """Maryland 2010 closed primary; counts loaded from the CSV fixture."""
    election_id = 'md-2010-09-14-primary'
    race_type = 'primary'
    primary_type = 'closed'
class Election2010General(Election2010):
    """Maryland 2010 general election; counts loaded from the CSV fixture."""
    election_id = 'md-2010-11-02-general'
    race_type = 'general'
class Election2012(StateLegislativeResultsMixin, CountyCongressResultsMixin,
                   MDElection):
    """Shared expectations for Maryland's 2012 elections (2011 district map)."""
    reporting_levels = ['county', 'precinct', 'state_legislative']
    # NOTE(review): attribute name here is `congressional_districts_to_county_2011`
    # (plural "districts"), unlike the `..._district_to_county_2002` names used
    # above — confirm it matches the jurisdiction module.
    congressional_district_to_county = jurisdiction.congressional_districts_to_county_2011

    @property
    def num_county_results(self):
        """President and U.S. Senate in every county, plus U.S. House."""
        num_counties = len(self.counties)
        statewide = (self._get_candidate_count('president') +
                     self._get_candidate_count('us-senate'))
        return statewide * num_counties + self.num_county_results_congress
class Election2012Primary(Election2012):
    """Maryland 2012 closed primary; counts declared inline."""
    election_id = 'md-2012-04-03-primary'
    race_type = 'primary'
    primary_type = 'closed'
    # Candidates per party-qualified contest slug.
    candidate_counts = {
        'president-d': 2,
        'president-r': 8,
        'us-senate-d': 9,
        'us-senate-r': 10,
        'us-house-of-representatives-1-d': 3,
        'us-house-of-representatives-1-r': 1,
        'us-house-of-representatives-2-d': 1,
        'us-house-of-representatives-2-r': 6,
        'us-house-of-representatives-3-d': 2,
        'us-house-of-representatives-3-r': 4,
        'us-house-of-representatives-4-d': 3,
        'us-house-of-representatives-4-r': 4,
        'us-house-of-representatives-5-d': 2,
        'us-house-of-representatives-5-r': 3,
        'us-house-of-representatives-6-d': 5,
        'us-house-of-representatives-6-r': 8,
        'us-house-of-representatives-7-d': 3,
        'us-house-of-representatives-7-r': 2,
        'us-house-of-representatives-8-d': 2,
        'us-house-of-representatives-8-r': 4,
    }
class Election2012General(Election2012):
    """Maryland 2012 general election; counts declared inline."""
    election_id = 'md-2012-11-06-general'
    race_type = 'general'
    # Candidates per contest slug (no party suffix in a general election).
    candidate_counts = {
        'president': 37,
        'us-senate': 9,
        'us-house-of-representatives-1': 7,
        'us-house-of-representatives-2': 5,
        'us-house-of-representatives-3': 4,
        'us-house-of-representatives-4': 4,
        'us-house-of-representatives-5': 5,
        'us-house-of-representatives-6': 4,
        'us-house-of-representatives-7': 6,
        'us-house-of-representatives-8': 5,
    }
| mit | cb1399c9dfa06a111b4371b6e304b793 | 33.329053 | 93 | 0.63908 | 3.396379 | false | true | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.