import torch
import time
import numpy as np
from all.core import State
from .writer import ExperimentWriter, CometWriter
from .experiment import Experiment
from all.environments import VectorEnvironment
from all.agents import ParallelAgent
import gym
class ParallelEnvExperiment(Experiment):
'''An Experiment object for training and testing agents that use parallel training environments.'''
def __init__(
self,
preset,
env,
name=None,
train_steps=float('inf'),
logdir='runs',
quiet=False,
render=False,
write_loss=True,
writer="tensorboard"
):
self._name = name if name is not None else preset.name
super().__init__(self._make_writer(logdir, self._name, env.name, write_loss, writer), quiet)
self._n_envs = preset.n_envs
if isinstance(env, VectorEnvironment):
assert self._n_envs == env.num_envs
self._env = env
else:
self._env = env.duplicate(self._n_envs)
self._preset = preset
self._agent = preset.agent(writer=self._writer, train_steps=train_steps)
self._render = render
# training state
self._returns = []
self._frame = 1
self._episode = 1
        self._episode_start_times = [None] * self._n_envs
        self._episode_start_frames = [None] * self._n_envs
# test state
self._test_episodes = 100
self._test_episodes_started = self._n_envs
self._test_returns = []
self._should_save_returns = [True] * self._n_envs
        if render:
            # render the (vectorized) training environment in human mode
            self._env.render(mode="human")
@property
def frame(self):
return self._frame
@property
def episode(self):
return self._episode
def train(self, frames=np.inf, episodes=np.inf):
num_envs = int(self._env.num_envs)
returns = np.zeros(num_envs)
state_array = self._env.reset()
start_time = time.time()
completed_frames = 0
        while not self._done(frames, episodes):
            # act in every environment in parallel
            action = self._agent.act(state_array)
            state_array = self._env.step(action)
            self._frame += num_envs
            episodes_completed = state_array.done.type(torch.IntTensor).sum().item()
            completed_frames += num_envs
            returns += state_array.reward.cpu().detach().numpy()
            if episodes_completed > 0:
                dones = state_array.done.cpu().detach().numpy()
                # frames-per-second over the window since the last logged episode
                cur_time = time.time()
                fps = completed_frames / (cur_time - start_time)
                completed_frames = 0
                start_time = cur_time
for i in range(num_envs):
if dones[i]:
self._log_training_episode(returns[i], fps)
returns[i] = 0
self._episode += episodes_completed
def test(self, episodes=100):
test_agent = self._preset.parallel_test_agent()
        # Note that we need to record the first N episodes that are STARTED,
        # not the first N that are completed, or we introduce bias:
        # shorter episodes finish sooner, so sampling by completion
        # over-represents short episodes.
test_returns = []
episodes_started = self._n_envs
should_record = [True] * self._n_envs
# initialize state
states = self._env.reset()
returns = states.reward.clone()
while len(test_returns) < episodes:
# step the agent and environments
actions = test_agent.act(states)
states = self._env.step(actions)
returns += states.reward
# record any episodes that have finished
for i, done in enumerate(states.done):
if done:
if should_record[i] and len(test_returns) < episodes:
episode_return = returns[i].item()
test_returns.append(episode_return)
self._log_test_episode(len(test_returns), episode_return)
returns[i] = 0.
episodes_started += 1
if episodes_started > episodes:
should_record[i] = False
self._log_test(test_returns)
return test_returns
def _done(self, frames, episodes):
return self._frame > frames or self._episode > episodes
def _make_writer(self, logdir, agent_name, env_name, write_loss, writer):
if writer == "comet":
return CometWriter(self, agent_name, env_name, loss=write_loss, logdir=logdir)
return ExperimentWriter(self, agent_name, env_name, loss=write_loss, logdir=logdir)
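# A minimal usage sketch (not part of the library source above): it assumes a
# parallel preset builder and an environment from the autonomous-learning-library,
# e.g. all.presets.classic_control.a2c and GymEnvironment; the exact names and
# step counts here are illustrative, not prescriptive.
#
#     from all.environments import GymEnvironment
#     from all.presets.classic_control import a2c
#
#     env = GymEnvironment("CartPole-v0")
#     preset = a2c.env(env).build()
#     experiment = ParallelEnvExperiment(preset, env, train_steps=100000)
#     experiment.train(frames=100000)
#     test_returns = experiment.test(episodes=100)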
# ----------------------------------------------------------------------------
# Sums every number in a JSON document, skipping any object (and everything
# inside it) that has "red" among its values (Advent of Code 2015, day 12,
# part two).
import json
from os.path import join, dirname, realpath
with open(join(dirname(realpath(__file__)), "input.txt")) as f:
obj = json.load(f)
def sum_rec(o):
    """Recursively sum all integers; a dict with "red" among its values counts as 0."""
if isinstance(o, int):
return o
if isinstance(o, list):
return sum(map(sum_rec, o))
if isinstance(o, dict):
if "red" in o.values():
return 0
return sum(map(sum_rec, o.values()))
return 0
print(sum_rec(obj))
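# Sanity checks against the puzzle's published examples; these asserts run
# after the answer above is printed and do not change it.
assert sum_rec([1, 2, 3]) == 6
assert sum_rec([1, {"c": "red", "b": 2}, 3]) == 4  # the whole object is skipped
assert sum_rec({"d": "red", "e": [1, 2, 3, 4], "f": 5}) == 0  # top-level object skipped
assert sum_rec([1, "red", 5]) == 6  # "red" inside an array changes nothing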
# ----------------------------------------------------------------------------
import pickle
import sys
import time
from datetime import date, datetime, timedelta
import dateutil
import pytest
import pytz
import simplejson as json
from dateutil import tz
from dateutil.relativedelta import FR, MO, SA, SU, TH, TU, WE
from arrow import arrow, locales
from .utils import assert_datetime_equality
class TestTestArrowInit:
def test_init_bad_input(self):
with pytest.raises(TypeError):
arrow.Arrow(2013)
with pytest.raises(TypeError):
arrow.Arrow(2013, 2)
with pytest.raises(ValueError):
arrow.Arrow(2013, 2, 2, 12, 30, 45, 9999999)
def test_init(self):
result = arrow.Arrow(2013, 2, 2)
self.expected = datetime(2013, 2, 2, tzinfo=tz.tzutc())
assert result._datetime == self.expected
result = arrow.Arrow(2013, 2, 2, 12)
self.expected = datetime(2013, 2, 2, 12, tzinfo=tz.tzutc())
assert result._datetime == self.expected
result = arrow.Arrow(2013, 2, 2, 12, 30)
self.expected = datetime(2013, 2, 2, 12, 30, tzinfo=tz.tzutc())
assert result._datetime == self.expected
result = arrow.Arrow(2013, 2, 2, 12, 30, 45)
self.expected = datetime(2013, 2, 2, 12, 30, 45, tzinfo=tz.tzutc())
assert result._datetime == self.expected
result = arrow.Arrow(2013, 2, 2, 12, 30, 45, 999999)
self.expected = datetime(2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.tzutc())
assert result._datetime == self.expected
result = arrow.Arrow(
2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris")
)
self.expected = datetime(
2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris")
)
assert result._datetime == self.expected
# regression tests for issue #626
def test_init_pytz_timezone(self):
result = arrow.Arrow(
2013, 2, 2, 12, 30, 45, 999999, tzinfo=pytz.timezone("Europe/Paris")
)
self.expected = datetime(
2013, 2, 2, 12, 30, 45, 999999, tzinfo=tz.gettz("Europe/Paris")
)
assert result._datetime == self.expected
assert_datetime_equality(result._datetime, self.expected, 1)
def test_init_with_fold(self):
before = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm")
after = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1)
assert hasattr(before, "fold")
assert hasattr(after, "fold")
# PEP-495 requires the comparisons below to be true
assert before == after
assert before.utcoffset() != after.utcoffset()
class TestTestArrowFactory:
def test_now(self):
result = arrow.Arrow.now()
assert_datetime_equality(
result._datetime, datetime.now().replace(tzinfo=tz.tzlocal())
)
def test_utcnow(self):
result = arrow.Arrow.utcnow()
assert_datetime_equality(
result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc())
)
assert result.fold == 0
def test_fromtimestamp(self):
timestamp = time.time()
result = arrow.Arrow.fromtimestamp(timestamp)
assert_datetime_equality(
result._datetime, datetime.now().replace(tzinfo=tz.tzlocal())
)
result = arrow.Arrow.fromtimestamp(timestamp, tzinfo=tz.gettz("Europe/Paris"))
assert_datetime_equality(
result._datetime,
datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")),
)
result = arrow.Arrow.fromtimestamp(timestamp, tzinfo="Europe/Paris")
assert_datetime_equality(
result._datetime,
datetime.fromtimestamp(timestamp, tz.gettz("Europe/Paris")),
)
with pytest.raises(ValueError):
arrow.Arrow.fromtimestamp("invalid timestamp")
def test_utcfromtimestamp(self):
timestamp = time.time()
result = arrow.Arrow.utcfromtimestamp(timestamp)
assert_datetime_equality(
result._datetime, datetime.utcnow().replace(tzinfo=tz.tzutc())
)
with pytest.raises(ValueError):
arrow.Arrow.utcfromtimestamp("invalid timestamp")
def test_fromdatetime(self):
dt = datetime(2013, 2, 3, 12, 30, 45, 1)
result = arrow.Arrow.fromdatetime(dt)
assert result._datetime == dt.replace(tzinfo=tz.tzutc())
def test_fromdatetime_dt_tzinfo(self):
dt = datetime(2013, 2, 3, 12, 30, 45, 1, tzinfo=tz.gettz("US/Pacific"))
result = arrow.Arrow.fromdatetime(dt)
assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific"))
def test_fromdatetime_tzinfo_arg(self):
dt = datetime(2013, 2, 3, 12, 30, 45, 1)
result = arrow.Arrow.fromdatetime(dt, tz.gettz("US/Pacific"))
assert result._datetime == dt.replace(tzinfo=tz.gettz("US/Pacific"))
def test_fromdate(self):
dt = date(2013, 2, 3)
result = arrow.Arrow.fromdate(dt, tz.gettz("US/Pacific"))
assert result._datetime == datetime(2013, 2, 3, tzinfo=tz.gettz("US/Pacific"))
def test_strptime(self):
formatted = datetime(2013, 2, 3, 12, 30, 45).strftime("%Y-%m-%d %H:%M:%S")
result = arrow.Arrow.strptime(formatted, "%Y-%m-%d %H:%M:%S")
assert result._datetime == datetime(2013, 2, 3, 12, 30, 45, tzinfo=tz.tzutc())
result = arrow.Arrow.strptime(
formatted, "%Y-%m-%d %H:%M:%S", tzinfo=tz.gettz("Europe/Paris")
)
assert result._datetime == datetime(
2013, 2, 3, 12, 30, 45, tzinfo=tz.gettz("Europe/Paris")
)
def test_fromordinal(self):
timestamp = 1607066909.937968
with pytest.raises(TypeError):
arrow.Arrow.fromordinal(timestamp)
with pytest.raises(ValueError):
arrow.Arrow.fromordinal(int(timestamp))
ordinal = arrow.Arrow.utcnow().toordinal()
with pytest.raises(TypeError):
arrow.Arrow.fromordinal(str(ordinal))
result = arrow.Arrow.fromordinal(ordinal)
dt = datetime.fromordinal(ordinal)
assert result.naive == dt
@pytest.mark.usefixtures("time_2013_02_03")
class TestTestArrowRepresentation:
def test_repr(self):
result = self.arrow.__repr__()
assert result == f"<Arrow [{self.arrow._datetime.isoformat()}]>"
def test_str(self):
result = self.arrow.__str__()
assert result == self.arrow._datetime.isoformat()
def test_hash(self):
result = self.arrow.__hash__()
assert result == self.arrow._datetime.__hash__()
def test_format(self):
result = f"{self.arrow:YYYY-MM-DD}"
assert result == "2013-02-03"
def test_bare_format(self):
result = self.arrow.format()
assert result == "2013-02-03 12:30:45+00:00"
def test_format_no_format_string(self):
result = f"{self.arrow}"
assert result == str(self.arrow)
def test_clone(self):
result = self.arrow.clone()
assert result is not self.arrow
assert result._datetime == self.arrow._datetime
@pytest.mark.usefixtures("time_2013_01_01")
class TestArrowAttribute:
def test_getattr_base(self):
with pytest.raises(AttributeError):
self.arrow.prop
def test_getattr_week(self):
assert self.arrow.week == 1
def test_getattr_quarter(self):
# start dates
q1 = arrow.Arrow(2013, 1, 1)
q2 = arrow.Arrow(2013, 4, 1)
q3 = arrow.Arrow(2013, 8, 1)
q4 = arrow.Arrow(2013, 10, 1)
assert q1.quarter == 1
assert q2.quarter == 2
assert q3.quarter == 3
assert q4.quarter == 4
# end dates
q1 = arrow.Arrow(2013, 3, 31)
q2 = arrow.Arrow(2013, 6, 30)
q3 = arrow.Arrow(2013, 9, 30)
q4 = arrow.Arrow(2013, 12, 31)
assert q1.quarter == 1
assert q2.quarter == 2
assert q3.quarter == 3
assert q4.quarter == 4
def test_getattr_dt_value(self):
assert self.arrow.year == 2013
def test_tzinfo(self):
assert self.arrow.tzinfo == tz.tzutc()
def test_naive(self):
assert self.arrow.naive == self.arrow._datetime.replace(tzinfo=None)
def test_timestamp(self):
assert self.arrow.timestamp() == self.arrow._datetime.timestamp()
def test_int_timestamp(self):
assert self.arrow.int_timestamp == int(self.arrow._datetime.timestamp())
def test_float_timestamp(self):
assert self.arrow.float_timestamp == self.arrow._datetime.timestamp()
def test_getattr_fold(self):
# UTC is always unambiguous
assert self.now.fold == 0
ambiguous_dt = arrow.Arrow(
2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm", fold=1
)
assert ambiguous_dt.fold == 1
with pytest.raises(AttributeError):
ambiguous_dt.fold = 0
def test_getattr_ambiguous(self):
assert not self.now.ambiguous
ambiguous_dt = arrow.Arrow(2017, 10, 29, 2, 0, tzinfo="Europe/Stockholm")
assert ambiguous_dt.ambiguous
def test_getattr_imaginary(self):
assert not self.now.imaginary
imaginary_dt = arrow.Arrow(2013, 3, 31, 2, 30, tzinfo="Europe/Paris")
assert imaginary_dt.imaginary
@pytest.mark.usefixtures("time_utcnow")
class TestArrowComparison:
def test_eq(self):
assert self.arrow == self.arrow
assert self.arrow == self.arrow.datetime
assert not (self.arrow == "abc")
def test_ne(self):
assert not (self.arrow != self.arrow)
assert not (self.arrow != self.arrow.datetime)
assert self.arrow != "abc"
def test_gt(self):
arrow_cmp = self.arrow.shift(minutes=1)
assert not (self.arrow > self.arrow)
assert not (self.arrow > self.arrow.datetime)
with pytest.raises(TypeError):
self.arrow > "abc" # noqa: B015
assert self.arrow < arrow_cmp
assert self.arrow < arrow_cmp.datetime
def test_ge(self):
with pytest.raises(TypeError):
self.arrow >= "abc" # noqa: B015
assert self.arrow >= self.arrow
assert self.arrow >= self.arrow.datetime
def test_lt(self):
arrow_cmp = self.arrow.shift(minutes=1)
assert not (self.arrow < self.arrow)
assert not (self.arrow < self.arrow.datetime)
with pytest.raises(TypeError):
self.arrow < "abc" # noqa: B015
assert self.arrow < arrow_cmp
assert self.arrow < arrow_cmp.datetime
def test_le(self):
with pytest.raises(TypeError):
self.arrow <= "abc" # noqa: B015
assert self.arrow <= self.arrow
assert self.arrow <= self.arrow.datetime
@pytest.mark.usefixtures("time_2013_01_01")
class TestArrowMath:
def test_add_timedelta(self):
result = self.arrow.__add__(timedelta(days=1))
assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc())
def test_add_other(self):
with pytest.raises(TypeError):
self.arrow + 1
def test_radd(self):
result = self.arrow.__radd__(timedelta(days=1))
assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc())
def test_sub_timedelta(self):
result = self.arrow.__sub__(timedelta(days=1))
assert result._datetime == datetime(2012, 12, 31, tzinfo=tz.tzutc())
def test_sub_datetime(self):
result = self.arrow.__sub__(datetime(2012, 12, 21, tzinfo=tz.tzutc()))
assert result == timedelta(days=11)
def test_sub_arrow(self):
result = self.arrow.__sub__(arrow.Arrow(2012, 12, 21, tzinfo=tz.tzutc()))
assert result == timedelta(days=11)
def test_sub_other(self):
with pytest.raises(TypeError):
self.arrow - object()
def test_rsub_datetime(self):
result = self.arrow.__rsub__(datetime(2012, 12, 21, tzinfo=tz.tzutc()))
assert result == timedelta(days=-11)
def test_rsub_other(self):
with pytest.raises(TypeError):
timedelta(days=1) - self.arrow
@pytest.mark.usefixtures("time_utcnow")
class TestArrowDatetimeInterface:
def test_date(self):
result = self.arrow.date()
assert result == self.arrow._datetime.date()
def test_time(self):
result = self.arrow.time()
assert result == self.arrow._datetime.time()
def test_timetz(self):
result = self.arrow.timetz()
assert result == self.arrow._datetime.timetz()
def test_astimezone(self):
other_tz = tz.gettz("US/Pacific")
result = self.arrow.astimezone(other_tz)
assert result == self.arrow._datetime.astimezone(other_tz)
def test_utcoffset(self):
result = self.arrow.utcoffset()
assert result == self.arrow._datetime.utcoffset()
def test_dst(self):
result = self.arrow.dst()
assert result == self.arrow._datetime.dst()
def test_timetuple(self):
result = self.arrow.timetuple()
assert result == self.arrow._datetime.timetuple()
def test_utctimetuple(self):
result = self.arrow.utctimetuple()
assert result == self.arrow._datetime.utctimetuple()
def test_toordinal(self):
result = self.arrow.toordinal()
assert result == self.arrow._datetime.toordinal()
def test_weekday(self):
result = self.arrow.weekday()
assert result == self.arrow._datetime.weekday()
def test_isoweekday(self):
result = self.arrow.isoweekday()
assert result == self.arrow._datetime.isoweekday()
def test_isocalendar(self):
result = self.arrow.isocalendar()
assert result == self.arrow._datetime.isocalendar()
def test_isoformat(self):
result = self.arrow.isoformat()
assert result == self.arrow._datetime.isoformat()
def test_isoformat_timespec(self):
result = self.arrow.isoformat(timespec="hours")
assert result == self.arrow._datetime.isoformat(timespec="hours")
result = self.arrow.isoformat(timespec="microseconds")
assert result == self.arrow._datetime.isoformat()
result = self.arrow.isoformat(timespec="milliseconds")
assert result == self.arrow._datetime.isoformat(timespec="milliseconds")
result = self.arrow.isoformat(sep="x", timespec="seconds")
assert result == self.arrow._datetime.isoformat(sep="x", timespec="seconds")
def test_simplejson(self):
result = json.dumps({"v": self.arrow.for_json()}, for_json=True)
assert json.loads(result)["v"] == self.arrow._datetime.isoformat()
def test_ctime(self):
result = self.arrow.ctime()
assert result == self.arrow._datetime.ctime()
def test_strftime(self):
result = self.arrow.strftime("%Y")
assert result == self.arrow._datetime.strftime("%Y")
class TestArrowFalsePositiveDst:
"""These tests relate to issues #376 and #551.
    The key points in both issues are that arrow will assign a UTC timezone if none is
    provided, and that .to() adjusts the other attributes to stay correct, whereas
    .replace() changes only the specified attribute.
    Issue 376
    >>> arrow.get('2016-11-06').to('America/New_York').ceil('day')
    <Arrow [2016-11-05T23:59:59.999999-04:00]>
Issue 551
>>> just_before = arrow.get('2018-11-04T01:59:59.999999')
>>> just_before
2018-11-04T01:59:59.999999+00:00
>>> just_after = just_before.shift(microseconds=1)
>>> just_after
2018-11-04T02:00:00+00:00
>>> just_before_eastern = just_before.replace(tzinfo='US/Eastern')
>>> just_before_eastern
2018-11-04T01:59:59.999999-04:00
>>> just_after_eastern = just_after.replace(tzinfo='US/Eastern')
>>> just_after_eastern
2018-11-04T02:00:00-05:00
"""
def test_dst(self):
self.before_1 = arrow.Arrow(
2016, 11, 6, 3, 59, tzinfo=tz.gettz("America/New_York")
)
self.before_2 = arrow.Arrow(2016, 11, 6, tzinfo=tz.gettz("America/New_York"))
self.after_1 = arrow.Arrow(2016, 11, 6, 4, tzinfo=tz.gettz("America/New_York"))
self.after_2 = arrow.Arrow(
2016, 11, 6, 23, 59, tzinfo=tz.gettz("America/New_York")
)
self.before_3 = arrow.Arrow(
2018, 11, 4, 3, 59, tzinfo=tz.gettz("America/New_York")
)
self.before_4 = arrow.Arrow(2018, 11, 4, tzinfo=tz.gettz("America/New_York"))
self.after_3 = arrow.Arrow(2018, 11, 4, 4, tzinfo=tz.gettz("America/New_York"))
self.after_4 = arrow.Arrow(
2018, 11, 4, 23, 59, tzinfo=tz.gettz("America/New_York")
)
assert self.before_1.day == self.before_2.day
assert self.after_1.day == self.after_2.day
assert self.before_3.day == self.before_4.day
assert self.after_3.day == self.after_4.day
class TestArrowConversion:
def test_to(self):
dt_from = datetime.now()
arrow_from = arrow.Arrow.fromdatetime(dt_from, tz.gettz("US/Pacific"))
self.expected = dt_from.replace(tzinfo=tz.gettz("US/Pacific")).astimezone(
tz.tzutc()
)
assert arrow_from.to("UTC").datetime == self.expected
assert arrow_from.to(tz.tzutc()).datetime == self.expected
# issue #368
def test_to_pacific_then_utc(self):
result = arrow.Arrow(2018, 11, 4, 1, tzinfo="-08:00").to("US/Pacific").to("UTC")
assert result == arrow.Arrow(2018, 11, 4, 9)
# issue #368
def test_to_amsterdam_then_utc(self):
result = arrow.Arrow(2016, 10, 30).to("Europe/Amsterdam")
assert result.utcoffset() == timedelta(seconds=7200)
# regression test for #690
def test_to_israel_same_offset(self):
result = arrow.Arrow(2019, 10, 27, 2, 21, 1, tzinfo="+03:00").to("Israel")
expected = arrow.Arrow(2019, 10, 27, 1, 21, 1, tzinfo="Israel")
assert result == expected
assert result.utcoffset() != expected.utcoffset()
# issue 315
def test_anchorage_dst(self):
before = arrow.Arrow(2016, 3, 13, 1, 59, tzinfo="America/Anchorage")
after = arrow.Arrow(2016, 3, 13, 2, 1, tzinfo="America/Anchorage")
assert before.utcoffset() != after.utcoffset()
# issue 476
def test_chicago_fall(self):
result = arrow.Arrow(2017, 11, 5, 2, 1, tzinfo="-05:00").to("America/Chicago")
expected = arrow.Arrow(2017, 11, 5, 1, 1, tzinfo="America/Chicago")
assert result == expected
assert result.utcoffset() != expected.utcoffset()
def test_toronto_gap(self):
before = arrow.Arrow(2011, 3, 13, 6, 30, tzinfo="UTC").to("America/Toronto")
after = arrow.Arrow(2011, 3, 13, 7, 30, tzinfo="UTC").to("America/Toronto")
assert before.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 1, 30)
assert after.datetime.replace(tzinfo=None) == datetime(2011, 3, 13, 3, 30)
assert before.utcoffset() != after.utcoffset()
def test_sydney_gap(self):
before = arrow.Arrow(2012, 10, 6, 15, 30, tzinfo="UTC").to("Australia/Sydney")
after = arrow.Arrow(2012, 10, 6, 16, 30, tzinfo="UTC").to("Australia/Sydney")
assert before.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 1, 30)
assert after.datetime.replace(tzinfo=None) == datetime(2012, 10, 7, 3, 30)
assert before.utcoffset() != after.utcoffset()
class TestArrowPickling:
def test_pickle_and_unpickle(self):
dt = arrow.Arrow.utcnow()
pickled = pickle.dumps(dt)
unpickled = pickle.loads(pickled)
assert unpickled == dt
class TestArrowReplace:
def test_not_attr(self):
with pytest.raises(ValueError):
arrow.Arrow.utcnow().replace(abc=1)
def test_replace(self):
arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)
assert arw.replace(year=2012) == arrow.Arrow(2012, 5, 5, 12, 30, 45)
assert arw.replace(month=1) == arrow.Arrow(2013, 1, 5, 12, 30, 45)
assert arw.replace(day=1) == arrow.Arrow(2013, 5, 1, 12, 30, 45)
assert arw.replace(hour=1) == arrow.Arrow(2013, 5, 5, 1, 30, 45)
assert arw.replace(minute=1) == arrow.Arrow(2013, 5, 5, 12, 1, 45)
assert arw.replace(second=1) == arrow.Arrow(2013, 5, 5, 12, 30, 1)
def test_replace_tzinfo(self):
arw = arrow.Arrow.utcnow().to("US/Eastern")
result = arw.replace(tzinfo=tz.gettz("US/Pacific"))
assert result == arw.datetime.replace(tzinfo=tz.gettz("US/Pacific"))
def test_replace_fold(self):
before = arrow.Arrow(2017, 11, 5, 1, tzinfo="America/New_York")
after = before.replace(fold=1)
assert before.fold == 0
assert after.fold == 1
assert before == after
assert before.utcoffset() != after.utcoffset()
def test_replace_fold_and_other(self):
arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)
assert arw.replace(fold=1, minute=50) == arrow.Arrow(2013, 5, 5, 12, 50, 45)
assert arw.replace(minute=50, fold=1) == arrow.Arrow(2013, 5, 5, 12, 50, 45)
def test_replace_week(self):
with pytest.raises(ValueError):
arrow.Arrow.utcnow().replace(week=1)
def test_replace_quarter(self):
with pytest.raises(ValueError):
arrow.Arrow.utcnow().replace(quarter=1)
def test_replace_quarter_and_fold(self):
with pytest.raises(AttributeError):
arrow.utcnow().replace(fold=1, quarter=1)
with pytest.raises(AttributeError):
arrow.utcnow().replace(quarter=1, fold=1)
def test_replace_other_kwargs(self):
with pytest.raises(AttributeError):
arrow.utcnow().replace(abc="def")
class TestArrowShift:
def test_not_attr(self):
now = arrow.Arrow.utcnow()
with pytest.raises(ValueError):
now.shift(abc=1)
with pytest.raises(ValueError):
now.shift(week=1)
def test_shift(self):
arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)
assert arw.shift(years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45)
assert arw.shift(quarters=1) == arrow.Arrow(2013, 8, 5, 12, 30, 45)
assert arw.shift(quarters=1, months=1) == arrow.Arrow(2013, 9, 5, 12, 30, 45)
assert arw.shift(months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45)
assert arw.shift(weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45)
assert arw.shift(days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
assert arw.shift(hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45)
assert arw.shift(minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45)
assert arw.shift(seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46)
assert arw.shift(microseconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 45, 1)
# Remember: Python's weekday 0 is Monday
assert arw.shift(weekday=0) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
assert arw.shift(weekday=1) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
assert arw.shift(weekday=2) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
assert arw.shift(weekday=3) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
assert arw.shift(weekday=4) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
assert arw.shift(weekday=5) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
assert arw.shift(weekday=6) == arw
with pytest.raises(IndexError):
arw.shift(weekday=7)
# Use dateutil.relativedelta's convenient day instances
assert arw.shift(weekday=MO) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
assert arw.shift(weekday=MO(0)) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
assert arw.shift(weekday=MO(1)) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
assert arw.shift(weekday=MO(2)) == arrow.Arrow(2013, 5, 13, 12, 30, 45)
assert arw.shift(weekday=TU) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
assert arw.shift(weekday=TU(0)) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
assert arw.shift(weekday=TU(1)) == arrow.Arrow(2013, 5, 7, 12, 30, 45)
assert arw.shift(weekday=TU(2)) == arrow.Arrow(2013, 5, 14, 12, 30, 45)
assert arw.shift(weekday=WE) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
assert arw.shift(weekday=WE(0)) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
assert arw.shift(weekday=WE(1)) == arrow.Arrow(2013, 5, 8, 12, 30, 45)
assert arw.shift(weekday=WE(2)) == arrow.Arrow(2013, 5, 15, 12, 30, 45)
assert arw.shift(weekday=TH) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
assert arw.shift(weekday=TH(0)) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
assert arw.shift(weekday=TH(1)) == arrow.Arrow(2013, 5, 9, 12, 30, 45)
assert arw.shift(weekday=TH(2)) == arrow.Arrow(2013, 5, 16, 12, 30, 45)
assert arw.shift(weekday=FR) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
assert arw.shift(weekday=FR(0)) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
assert arw.shift(weekday=FR(1)) == arrow.Arrow(2013, 5, 10, 12, 30, 45)
assert arw.shift(weekday=FR(2)) == arrow.Arrow(2013, 5, 17, 12, 30, 45)
assert arw.shift(weekday=SA) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
assert arw.shift(weekday=SA(0)) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
assert arw.shift(weekday=SA(1)) == arrow.Arrow(2013, 5, 11, 12, 30, 45)
assert arw.shift(weekday=SA(2)) == arrow.Arrow(2013, 5, 18, 12, 30, 45)
assert arw.shift(weekday=SU) == arw
assert arw.shift(weekday=SU(0)) == arw
assert arw.shift(weekday=SU(1)) == arw
assert arw.shift(weekday=SU(2)) == arrow.Arrow(2013, 5, 12, 12, 30, 45)
def test_shift_negative(self):
arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)
assert arw.shift(years=-1) == arrow.Arrow(2012, 5, 5, 12, 30, 45)
assert arw.shift(quarters=-1) == arrow.Arrow(2013, 2, 5, 12, 30, 45)
assert arw.shift(quarters=-1, months=-1) == arrow.Arrow(2013, 1, 5, 12, 30, 45)
assert arw.shift(months=-1) == arrow.Arrow(2013, 4, 5, 12, 30, 45)
assert arw.shift(weeks=-1) == arrow.Arrow(2013, 4, 28, 12, 30, 45)
assert arw.shift(days=-1) == arrow.Arrow(2013, 5, 4, 12, 30, 45)
assert arw.shift(hours=-1) == arrow.Arrow(2013, 5, 5, 11, 30, 45)
assert arw.shift(minutes=-1) == arrow.Arrow(2013, 5, 5, 12, 29, 45)
assert arw.shift(seconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44)
assert arw.shift(microseconds=-1) == arrow.Arrow(2013, 5, 5, 12, 30, 44, 999999)
# Not sure how practical these negative weekdays are
assert arw.shift(weekday=-1) == arw.shift(weekday=SU)
assert arw.shift(weekday=-2) == arw.shift(weekday=SA)
assert arw.shift(weekday=-3) == arw.shift(weekday=FR)
assert arw.shift(weekday=-4) == arw.shift(weekday=TH)
assert arw.shift(weekday=-5) == arw.shift(weekday=WE)
assert arw.shift(weekday=-6) == arw.shift(weekday=TU)
assert arw.shift(weekday=-7) == arw.shift(weekday=MO)
with pytest.raises(IndexError):
arw.shift(weekday=-8)
assert arw.shift(weekday=MO(-1)) == arrow.Arrow(2013, 4, 29, 12, 30, 45)
assert arw.shift(weekday=TU(-1)) == arrow.Arrow(2013, 4, 30, 12, 30, 45)
assert arw.shift(weekday=WE(-1)) == arrow.Arrow(2013, 5, 1, 12, 30, 45)
assert arw.shift(weekday=TH(-1)) == arrow.Arrow(2013, 5, 2, 12, 30, 45)
assert arw.shift(weekday=FR(-1)) == arrow.Arrow(2013, 5, 3, 12, 30, 45)
assert arw.shift(weekday=SA(-1)) == arrow.Arrow(2013, 5, 4, 12, 30, 45)
assert arw.shift(weekday=SU(-1)) == arw
assert arw.shift(weekday=SU(-2)) == arrow.Arrow(2013, 4, 28, 12, 30, 45)
def test_shift_quarters_bug(self):
arw = arrow.Arrow(2013, 5, 5, 12, 30, 45)
        # The value of the last-read argument was used instead of the ``quarters`` argument.
        # Recall that before Python 3.6 (PEP 468) the keyword argument dict was unordered,
        # so only certain combinations of arguments would exhibit this.
assert arw.shift(quarters=0, years=1) == arrow.Arrow(2014, 5, 5, 12, 30, 45)
assert arw.shift(quarters=0, months=1) == arrow.Arrow(2013, 6, 5, 12, 30, 45)
assert arw.shift(quarters=0, weeks=1) == arrow.Arrow(2013, 5, 12, 12, 30, 45)
assert arw.shift(quarters=0, days=1) == arrow.Arrow(2013, 5, 6, 12, 30, 45)
assert arw.shift(quarters=0, hours=1) == arrow.Arrow(2013, 5, 5, 13, 30, 45)
assert arw.shift(quarters=0, minutes=1) == arrow.Arrow(2013, 5, 5, 12, 31, 45)
assert arw.shift(quarters=0, seconds=1) == arrow.Arrow(2013, 5, 5, 12, 30, 46)
assert arw.shift(quarters=0, microseconds=1) == arrow.Arrow(
2013, 5, 5, 12, 30, 45, 1
)
def test_shift_positive_imaginary(self):
# Avoid shifting into imaginary datetimes, take into account DST and other timezone changes.
new_york = arrow.Arrow(2017, 3, 12, 1, 30, tzinfo="America/New_York")
assert new_york.shift(hours=+1) == arrow.Arrow(
2017, 3, 12, 3, 30, tzinfo="America/New_York"
)
# pendulum example
paris = arrow.Arrow(2013, 3, 31, 1, 50, tzinfo="Europe/Paris")
assert paris.shift(minutes=+20) == arrow.Arrow(
2013, 3, 31, 3, 10, tzinfo="Europe/Paris"
)
canberra = arrow.Arrow(2018, 10, 7, 1, 30, tzinfo="Australia/Canberra")
assert canberra.shift(hours=+1) == arrow.Arrow(
2018, 10, 7, 3, 30, tzinfo="Australia/Canberra"
)
kiev = arrow.Arrow(2018, 3, 25, 2, 30, tzinfo="Europe/Kiev")
assert kiev.shift(hours=+1) == arrow.Arrow(
2018, 3, 25, 4, 30, tzinfo="Europe/Kiev"
)
# Edge case, the entire day of 2011-12-30 is imaginary in this zone!
apia = arrow.Arrow(2011, 12, 29, 23, tzinfo="Pacific/Apia")
assert apia.shift(hours=+2) == arrow.Arrow(
2011, 12, 31, 1, tzinfo="Pacific/Apia"
)
def test_shift_negative_imaginary(self):
new_york = arrow.Arrow(2011, 3, 13, 3, 30, tzinfo="America/New_York")
assert new_york.shift(hours=-1) == arrow.Arrow(
2011, 3, 13, 3, 30, tzinfo="America/New_York"
)
assert new_york.shift(hours=-2) == arrow.Arrow(
2011, 3, 13, 1, 30, tzinfo="America/New_York"
)
london = arrow.Arrow(2019, 3, 31, 2, tzinfo="Europe/London")
assert london.shift(hours=-1) == arrow.Arrow(
2019, 3, 31, 2, tzinfo="Europe/London"
)
assert london.shift(hours=-2) == arrow.Arrow(
2019, 3, 31, 0, tzinfo="Europe/London"
)
# edge case, crossing the international dateline
apia = arrow.Arrow(2011, 12, 31, 1, tzinfo="Pacific/Apia")
assert apia.shift(hours=-2) == arrow.Arrow(
2011, 12, 31, 23, tzinfo="Pacific/Apia"
)
@pytest.mark.skipif(
dateutil.__version__ < "2.7.1", reason="old tz database (2018d needed)"
)
def test_shift_kiritimati(self):
# corrected 2018d tz database release, will fail in earlier versions
kiritimati = arrow.Arrow(1994, 12, 30, 12, 30, tzinfo="Pacific/Kiritimati")
assert kiritimati.shift(days=+1) == arrow.Arrow(
1995, 1, 1, 12, 30, tzinfo="Pacific/Kiritimati"
)
    def test_shift_imaginary_seconds(self):
# offset has a seconds component
monrovia = arrow.Arrow(1972, 1, 6, 23, tzinfo="Africa/Monrovia")
assert monrovia.shift(hours=+1, minutes=+30) == arrow.Arrow(
1972, 1, 7, 1, 14, 30, tzinfo="Africa/Monrovia"
)
class TestArrowRange:
def test_year(self):
result = list(
arrow.Arrow.range(
"year", datetime(2013, 1, 2, 3, 4, 5), datetime(2016, 4, 5, 6, 7, 8)
)
)
assert result == [
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2014, 1, 2, 3, 4, 5),
arrow.Arrow(2015, 1, 2, 3, 4, 5),
arrow.Arrow(2016, 1, 2, 3, 4, 5),
]
def test_quarter(self):
result = list(
arrow.Arrow.range(
"quarter", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9)
)
)
assert result == [
arrow.Arrow(2013, 2, 3, 4, 5, 6),
arrow.Arrow(2013, 5, 3, 4, 5, 6),
]
def test_month(self):
result = list(
arrow.Arrow.range(
"month", datetime(2013, 2, 3, 4, 5, 6), datetime(2013, 5, 6, 7, 8, 9)
)
)
assert result == [
arrow.Arrow(2013, 2, 3, 4, 5, 6),
arrow.Arrow(2013, 3, 3, 4, 5, 6),
arrow.Arrow(2013, 4, 3, 4, 5, 6),
arrow.Arrow(2013, 5, 3, 4, 5, 6),
]
def test_week(self):
result = list(
arrow.Arrow.range(
"week", datetime(2013, 9, 1, 2, 3, 4), datetime(2013, 10, 1, 2, 3, 4)
)
)
assert result == [
arrow.Arrow(2013, 9, 1, 2, 3, 4),
arrow.Arrow(2013, 9, 8, 2, 3, 4),
arrow.Arrow(2013, 9, 15, 2, 3, 4),
arrow.Arrow(2013, 9, 22, 2, 3, 4),
arrow.Arrow(2013, 9, 29, 2, 3, 4),
]
def test_day(self):
result = list(
arrow.Arrow.range(
"day", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 5, 6, 7, 8)
)
)
assert result == [
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2013, 1, 3, 3, 4, 5),
arrow.Arrow(2013, 1, 4, 3, 4, 5),
arrow.Arrow(2013, 1, 5, 3, 4, 5),
]
def test_hour(self):
result = list(
arrow.Arrow.range(
"hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 6, 7, 8)
)
)
assert result == [
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2013, 1, 2, 4, 4, 5),
arrow.Arrow(2013, 1, 2, 5, 4, 5),
arrow.Arrow(2013, 1, 2, 6, 4, 5),
]
result = list(
arrow.Arrow.range(
"hour", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 5)
)
)
assert result == [arrow.Arrow(2013, 1, 2, 3, 4, 5)]
def test_minute(self):
result = list(
arrow.Arrow.range(
"minute", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 7, 8)
)
)
assert result == [
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2013, 1, 2, 3, 5, 5),
arrow.Arrow(2013, 1, 2, 3, 6, 5),
arrow.Arrow(2013, 1, 2, 3, 7, 5),
]
def test_second(self):
result = list(
arrow.Arrow.range(
"second", datetime(2013, 1, 2, 3, 4, 5), datetime(2013, 1, 2, 3, 4, 8)
)
)
assert result == [
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2013, 1, 2, 3, 4, 6),
arrow.Arrow(2013, 1, 2, 3, 4, 7),
arrow.Arrow(2013, 1, 2, 3, 4, 8),
]
def test_arrow(self):
result = list(
arrow.Arrow.range(
"day",
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2013, 1, 5, 6, 7, 8),
)
)
assert result == [
arrow.Arrow(2013, 1, 2, 3, 4, 5),
arrow.Arrow(2013, 1, 3, 3, 4, 5),
arrow.Arrow(2013, 1, 4, 3, 4, 5),
arrow.Arrow(2013, 1, 5, 3, 4, 5),
]
def test_naive_tz(self):
result = arrow.Arrow.range(
"year", datetime(2013, 1, 2, 3), datetime(2016, 4, 5, 6), "US/Pacific"
)
for r in result:
assert r.tzinfo == tz.gettz("US/Pacific")
def test_aware_same_tz(self):
result = arrow.Arrow.range(
"day",
arrow.Arrow(2013, 1, 1, tzinfo=tz.gettz("US/Pacific")),
arrow.Arrow(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")),
)
for r in result:
assert r.tzinfo == tz.gettz("US/Pacific")
def test_aware_different_tz(self):
result = arrow.Arrow.range(
"day",
datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")),
datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")),
)
for r in result:
assert r.tzinfo == tz.gettz("US/Eastern")
def test_aware_tz(self):
result = arrow.Arrow.range(
"day",
datetime(2013, 1, 1, tzinfo=tz.gettz("US/Eastern")),
datetime(2013, 1, 3, tzinfo=tz.gettz("US/Pacific")),
tz=tz.gettz("US/Central"),
)
for r in result:
assert r.tzinfo == tz.gettz("US/Central")
def test_imaginary(self):
# issue #72, avoid duplication in utc column
before = arrow.Arrow(2018, 3, 10, 23, tzinfo="US/Pacific")
after = arrow.Arrow(2018, 3, 11, 4, tzinfo="US/Pacific")
pacific_range = [t for t in arrow.Arrow.range("hour", before, after)]
utc_range = [t.to("utc") for t in arrow.Arrow.range("hour", before, after)]
assert len(pacific_range) == len(set(pacific_range))
assert len(utc_range) == len(set(utc_range))
def test_unsupported(self):
with pytest.raises(ValueError):
next(arrow.Arrow.range("abc", datetime.utcnow(), datetime.utcnow()))
def test_range_over_months_ending_on_different_days(self):
# regression test for issue #842
result = list(arrow.Arrow.range("month", datetime(2015, 1, 31), limit=4))
assert result == [
arrow.Arrow(2015, 1, 31),
arrow.Arrow(2015, 2, 28),
arrow.Arrow(2015, 3, 31),
arrow.Arrow(2015, 4, 30),
]
result = list(arrow.Arrow.range("month", datetime(2015, 1, 30), limit=3))
assert result == [
arrow.Arrow(2015, 1, 30),
arrow.Arrow(2015, 2, 28),
arrow.Arrow(2015, 3, 30),
]
result = list(arrow.Arrow.range("month", datetime(2015, 2, 28), limit=3))
assert result == [
arrow.Arrow(2015, 2, 28),
arrow.Arrow(2015, 3, 28),
arrow.Arrow(2015, 4, 28),
]
result = list(arrow.Arrow.range("month", datetime(2015, 3, 31), limit=3))
assert result == [
arrow.Arrow(2015, 3, 31),
arrow.Arrow(2015, 4, 30),
arrow.Arrow(2015, 5, 31),
]
def test_range_over_quarter_months_ending_on_different_days(self):
result = list(arrow.Arrow.range("quarter", datetime(2014, 11, 30), limit=3))
assert result == [
arrow.Arrow(2014, 11, 30),
arrow.Arrow(2015, 2, 28),
arrow.Arrow(2015, 5, 30),
]
def test_range_over_year_maintains_end_date_across_leap_year(self):
result = list(arrow.Arrow.range("year", datetime(2012, 2, 29), limit=5))
assert result == [
arrow.Arrow(2012, 2, 29),
arrow.Arrow(2013, 2, 28),
arrow.Arrow(2014, 2, 28),
arrow.Arrow(2015, 2, 28),
arrow.Arrow(2016, 2, 29),
]
class TestArrowSpanRange:
def test_year(self):
result = list(
arrow.Arrow.span_range("year", datetime(2013, 2, 1), datetime(2016, 3, 31))
)
assert result == [
(
arrow.Arrow(2013, 1, 1),
arrow.Arrow(2013, 12, 31, 23, 59, 59, 999999),
),
(
arrow.Arrow(2014, 1, 1),
arrow.Arrow(2014, 12, 31, 23, 59, 59, 999999),
),
(
arrow.Arrow(2015, 1, 1),
arrow.Arrow(2015, 12, 31, 23, 59, 59, 999999),
),
(
arrow.Arrow(2016, 1, 1),
arrow.Arrow(2016, 12, 31, 23, 59, 59, 999999),
),
]
def test_quarter(self):
result = list(
arrow.Arrow.span_range(
"quarter", datetime(2013, 2, 2), datetime(2013, 5, 15)
)
)
assert result == [
(arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)),
(arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 6, 30, 23, 59, 59, 999999)),
]
def test_month(self):
result = list(
arrow.Arrow.span_range("month", datetime(2013, 1, 2), datetime(2013, 4, 15))
)
assert result == [
(arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 1, 31, 23, 59, 59, 999999)),
(arrow.Arrow(2013, 2, 1), arrow.Arrow(2013, 2, 28, 23, 59, 59, 999999)),
(arrow.Arrow(2013, 3, 1), arrow.Arrow(2013, 3, 31, 23, 59, 59, 999999)),
(arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 4, 30, 23, 59, 59, 999999)),
]
def test_week(self):
result = list(
arrow.Arrow.span_range("week", datetime(2013, 2, 2), datetime(2013, 2, 28))
)
assert result == [
(arrow.Arrow(2013, 1, 28), arrow.Arrow(2013, 2, 3, 23, 59, 59, 999999)),
(arrow.Arrow(2013, 2, 4), arrow.Arrow(2013, 2, 10, 23, 59, 59, 999999)),
(
arrow.Arrow(2013, 2, 11),
arrow.Arrow(2013, 2, 17, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 2, 18),
arrow.Arrow(2013, 2, 24, 23, 59, 59, 999999),
),
(arrow.Arrow(2013, 2, 25), arrow.Arrow(2013, 3, 3, 23, 59, 59, 999999)),
]
def test_day(self):
result = list(
arrow.Arrow.span_range(
"day", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12)
)
)
assert result == [
(
arrow.Arrow(2013, 1, 1, 0),
arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 2, 0),
arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 3, 0),
arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 4, 0),
arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999),
),
]
def test_days(self):
result = list(
arrow.Arrow.span_range(
"days", datetime(2013, 1, 1, 12), datetime(2013, 1, 4, 12)
)
)
assert result == [
(
arrow.Arrow(2013, 1, 1, 0),
arrow.Arrow(2013, 1, 1, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 2, 0),
arrow.Arrow(2013, 1, 2, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 3, 0),
arrow.Arrow(2013, 1, 3, 23, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 4, 0),
arrow.Arrow(2013, 1, 4, 23, 59, 59, 999999),
),
]
def test_hour(self):
result = list(
arrow.Arrow.span_range(
"hour", datetime(2013, 1, 1, 0, 30), datetime(2013, 1, 1, 3, 30)
)
)
assert result == [
(
arrow.Arrow(2013, 1, 1, 0),
arrow.Arrow(2013, 1, 1, 0, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 1, 1),
arrow.Arrow(2013, 1, 1, 1, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 1, 2),
arrow.Arrow(2013, 1, 1, 2, 59, 59, 999999),
),
(
arrow.Arrow(2013, 1, 1, 3),
arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999),
),
]
result = list(
arrow.Arrow.span_range(
"hour", datetime(2013, 1, 1, 3, 30), datetime(2013, 1, 1, 3, 30)
)
)
assert result == [
(arrow.Arrow(2013, 1, 1, 3), arrow.Arrow(2013, 1, 1, 3, 59, 59, 999999))
]
def test_minute(self):
result = list(
arrow.Arrow.span_range(
"minute", datetime(2013, 1, 1, 0, 0, 30), datetime(2013, 1, 1, 0, 3, 30)
)
)
assert result == [
(
arrow.Arrow(2013, 1, 1, 0, 0),
arrow.Arrow(2013, 1, 1, 0, 0, 59, 999999),
),
(
arrow.Arrow(2013, 1, 1, 0, 1),
arrow.Arrow(2013, 1, 1, 0, 1, 59, 999999),
),
(
arrow.Arrow(2013, 1, 1, 0, 2),
arrow.Arrow(2013, 1, 1, 0, 2, 59, 999999),
),
(
arrow.Arrow(2013, 1, 1, 0, 3),
arrow.Arrow(2013, 1, 1, 0, 3, 59, 999999),
),
]
def test_second(self):
result = list(
arrow.Arrow.span_range(
"second", datetime(2013, 1, 1), datetime(2013, 1, 1, 0, 0, 3)
)
)
assert result == [
(
arrow.Arrow(2013, 1, 1, 0, 0, 0),
arrow.Arrow(2013, 1, 1, 0, 0, 0, 999999),
),
(
arrow.Arrow(2013, 1, 1, 0, 0, 1),
arrow.Arrow(2013, 1, 1, 0, 0, 1, 999999),
),
(
arrow.Arrow(2013, 1, 1, 0, 0, 2),
arrow.Arrow(2013, 1, 1, 0, 0, 2, 999999),
),
(
arrow.Arrow(2013, 1, 1, 0, 0, 3),
arrow.Arrow(2013, 1, 1, 0, 0, 3, 999999),
),
]
def test_naive_tz(self):
tzinfo = tz.gettz("US/Pacific")
result = arrow.Arrow.span_range(
"hour", datetime(2013, 1, 1, 0), datetime(2013, 1, 1, 3, 59), "US/Pacific"
)
for f, c in result:
assert f.tzinfo == tzinfo
assert c.tzinfo == tzinfo
def test_aware_same_tz(self):
tzinfo = tz.gettz("US/Pacific")
result = arrow.Arrow.span_range(
"hour",
datetime(2013, 1, 1, 0, tzinfo=tzinfo),
datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo),
)
for f, c in result:
assert f.tzinfo == tzinfo
assert c.tzinfo == tzinfo
def test_aware_different_tz(self):
tzinfo1 = tz.gettz("US/Pacific")
tzinfo2 = tz.gettz("US/Eastern")
result = arrow.Arrow.span_range(
"hour",
datetime(2013, 1, 1, 0, tzinfo=tzinfo1),
datetime(2013, 1, 1, 2, 59, tzinfo=tzinfo2),
)
for f, c in result:
assert f.tzinfo == tzinfo1
assert c.tzinfo == tzinfo1
def test_aware_tz(self):
result = arrow.Arrow.span_range(
"hour",
datetime(2013, 1, 1, 0, tzinfo=tz.gettz("US/Eastern")),
datetime(2013, 1, 1, 2, 59, tzinfo=tz.gettz("US/Eastern")),
tz="US/Central",
)
for f, c in result:
assert f.tzinfo == tz.gettz("US/Central")
assert c.tzinfo == tz.gettz("US/Central")
def test_bounds_param_is_passed(self):
result = list(
arrow.Arrow.span_range(
"quarter", datetime(2013, 2, 2), datetime(2013, 5, 15), bounds="[]"
)
)
assert result == [
(arrow.Arrow(2013, 1, 1), arrow.Arrow(2013, 4, 1)),
(arrow.Arrow(2013, 4, 1), arrow.Arrow(2013, 7, 1)),
]
def test_exact_bound_exclude(self):
result = list(
arrow.Arrow.span_range(
"hour",
datetime(2013, 5, 5, 12, 30),
datetime(2013, 5, 5, 17, 15),
bounds="[)",
exact=True,
)
)
expected = [
(
arrow.Arrow(2013, 5, 5, 12, 30),
arrow.Arrow(2013, 5, 5, 13, 29, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 13, 30),
arrow.Arrow(2013, 5, 5, 14, 29, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 14, 30),
arrow.Arrow(2013, 5, 5, 15, 29, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 15, 30),
arrow.Arrow(2013, 5, 5, 16, 29, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 16, 30),
arrow.Arrow(2013, 5, 5, 17, 14, 59, 999999),
),
]
assert result == expected
def test_exact_floor_equals_end(self):
result = list(
arrow.Arrow.span_range(
"minute",
datetime(2013, 5, 5, 12, 30),
datetime(2013, 5, 5, 12, 40),
exact=True,
)
)
expected = [
(
arrow.Arrow(2013, 5, 5, 12, 30),
arrow.Arrow(2013, 5, 5, 12, 30, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 31),
arrow.Arrow(2013, 5, 5, 12, 31, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 32),
arrow.Arrow(2013, 5, 5, 12, 32, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 33),
arrow.Arrow(2013, 5, 5, 12, 33, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 34),
arrow.Arrow(2013, 5, 5, 12, 34, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 35),
arrow.Arrow(2013, 5, 5, 12, 35, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 36),
arrow.Arrow(2013, 5, 5, 12, 36, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 37),
arrow.Arrow(2013, 5, 5, 12, 37, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 38),
arrow.Arrow(2013, 5, 5, 12, 38, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 12, 39),
arrow.Arrow(2013, 5, 5, 12, 39, 59, 999999),
),
]
assert result == expected
def test_exact_bound_include(self):
result = list(
arrow.Arrow.span_range(
"hour",
datetime(2013, 5, 5, 2, 30),
datetime(2013, 5, 5, 6, 00),
bounds="(]",
exact=True,
)
)
expected = [
(
arrow.Arrow(2013, 5, 5, 2, 30, 00, 1),
arrow.Arrow(2013, 5, 5, 3, 30, 00, 0),
),
(
arrow.Arrow(2013, 5, 5, 3, 30, 00, 1),
arrow.Arrow(2013, 5, 5, 4, 30, 00, 0),
),
(
arrow.Arrow(2013, 5, 5, 4, 30, 00, 1),
arrow.Arrow(2013, 5, 5, 5, 30, 00, 0),
),
(
arrow.Arrow(2013, 5, 5, 5, 30, 00, 1),
arrow.Arrow(2013, 5, 5, 6, 00),
),
]
assert result == expected
def test_small_interval_exact_open_bounds(self):
result = list(
arrow.Arrow.span_range(
"minute",
datetime(2013, 5, 5, 2, 30),
datetime(2013, 5, 5, 2, 31),
bounds="()",
exact=True,
)
)
expected = [
(
arrow.Arrow(2013, 5, 5, 2, 30, 00, 1),
arrow.Arrow(2013, 5, 5, 2, 30, 59, 999999),
),
]
assert result == expected
class TestArrowInterval:
def test_incorrect_input(self):
with pytest.raises(ValueError):
list(
arrow.Arrow.interval(
"month", datetime(2013, 1, 2), datetime(2013, 4, 15), 0
)
)
def test_correct(self):
result = list(
arrow.Arrow.interval(
"hour", datetime(2013, 5, 5, 12, 30), datetime(2013, 5, 5, 17, 15), 2
)
)
assert result == [
(
arrow.Arrow(2013, 5, 5, 12),
arrow.Arrow(2013, 5, 5, 13, 59, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 14),
arrow.Arrow(2013, 5, 5, 15, 59, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 16),
arrow.Arrow(2013, 5, 5, 17, 59, 59, 999999),
),
]
def test_bounds_param_is_passed(self):
result = list(
arrow.Arrow.interval(
"hour",
datetime(2013, 5, 5, 12, 30),
datetime(2013, 5, 5, 17, 15),
2,
bounds="[]",
)
)
assert result == [
(arrow.Arrow(2013, 5, 5, 12), arrow.Arrow(2013, 5, 5, 14)),
(arrow.Arrow(2013, 5, 5, 14), arrow.Arrow(2013, 5, 5, 16)),
(arrow.Arrow(2013, 5, 5, 16), arrow.Arrow(2013, 5, 5, 18)),
]
def test_exact(self):
result = list(
arrow.Arrow.interval(
"hour",
datetime(2013, 5, 5, 12, 30),
datetime(2013, 5, 5, 17, 15),
4,
exact=True,
)
)
expected = [
(
arrow.Arrow(2013, 5, 5, 12, 30),
arrow.Arrow(2013, 5, 5, 16, 29, 59, 999999),
),
(
arrow.Arrow(2013, 5, 5, 16, 30),
arrow.Arrow(2013, 5, 5, 17, 14, 59, 999999),
),
]
assert result == expected
@pytest.mark.usefixtures("time_2013_02_15")
class TestArrowSpan:
def test_span_attribute(self):
with pytest.raises(ValueError):
self.arrow.span("span")
def test_span_year(self):
floor, ceil = self.arrow.span("year")
assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 12, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_quarter(self):
floor, ceil = self.arrow.span("quarter")
assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 3, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_quarter_count(self):
floor, ceil = self.arrow.span("quarter", 2)
assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 6, 30, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_year_count(self):
floor, ceil = self.arrow.span("year", 2)
assert floor == datetime(2013, 1, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2014, 12, 31, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_month(self):
floor, ceil = self.arrow.span("month")
assert floor == datetime(2013, 2, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 28, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_week(self):
"""
>>> self.arrow.format("YYYY-MM-DD") == "2013-02-15"
>>> self.arrow.isoweekday() == 5 # a Friday
"""
# span week from Monday to Sunday
floor, ceil = self.arrow.span("week")
assert floor == datetime(2013, 2, 11, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 17, 23, 59, 59, 999999, tzinfo=tz.tzutc())
# span week from Tuesday to Monday
floor, ceil = self.arrow.span("week", week_start=2)
assert floor == datetime(2013, 2, 12, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 18, 23, 59, 59, 999999, tzinfo=tz.tzutc())
# span week from Saturday to Friday
floor, ceil = self.arrow.span("week", week_start=6)
assert floor == datetime(2013, 2, 9, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 23, 59, 59, 999999, tzinfo=tz.tzutc())
# span week from Sunday to Saturday
floor, ceil = self.arrow.span("week", week_start=7)
assert floor == datetime(2013, 2, 10, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 16, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_day(self):
floor, ceil = self.arrow.span("day")
assert floor == datetime(2013, 2, 15, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 23, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_hour(self):
floor, ceil = self.arrow.span("hour")
assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc())
def test_span_minute(self):
floor, ceil = self.arrow.span("minute")
assert floor == datetime(2013, 2, 15, 3, 41, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 41, 59, 999999, tzinfo=tz.tzutc())
def test_span_second(self):
floor, ceil = self.arrow.span("second")
assert floor == datetime(2013, 2, 15, 3, 41, 22, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 41, 22, 999999, tzinfo=tz.tzutc())
def test_span_microsecond(self):
floor, ceil = self.arrow.span("microsecond")
assert floor == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc())
def test_floor(self):
floor, ceil = self.arrow.span("month")
assert floor == self.arrow.floor("month")
assert ceil == self.arrow.ceil("month")
def test_span_inclusive_inclusive(self):
floor, ceil = self.arrow.span("hour", bounds="[]")
assert floor == datetime(2013, 2, 15, 3, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc())
def test_span_exclusive_inclusive(self):
floor, ceil = self.arrow.span("hour", bounds="(]")
assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 4, tzinfo=tz.tzutc())
def test_span_exclusive_exclusive(self):
floor, ceil = self.arrow.span("hour", bounds="()")
assert floor == datetime(2013, 2, 15, 3, 0, 0, 1, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 59, 59, 999999, tzinfo=tz.tzutc())
def test_bounds_are_validated(self):
with pytest.raises(ValueError):
floor, ceil = self.arrow.span("hour", bounds="][")
def test_exact(self):
result_floor, result_ceil = self.arrow.span("hour", exact=True)
expected_floor = datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc())
expected_ceil = datetime(2013, 2, 15, 4, 41, 22, 8922, tzinfo=tz.tzutc())
assert result_floor == expected_floor
assert result_ceil == expected_ceil
def test_exact_inclusive_inclusive(self):
floor, ceil = self.arrow.span("minute", bounds="[]", exact=True)
assert floor == datetime(2013, 2, 15, 3, 41, 22, 8923, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 42, 22, 8923, tzinfo=tz.tzutc())
def test_exact_exclusive_inclusive(self):
floor, ceil = self.arrow.span("day", bounds="(]", exact=True)
assert floor == datetime(2013, 2, 15, 3, 41, 22, 8924, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 16, 3, 41, 22, 8923, tzinfo=tz.tzutc())
def test_exact_exclusive_exclusive(self):
floor, ceil = self.arrow.span("second", bounds="()", exact=True)
assert floor == datetime(2013, 2, 15, 3, 41, 22, 8924, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 2, 15, 3, 41, 23, 8922, tzinfo=tz.tzutc())
def test_all_parameters_specified(self):
floor, ceil = self.arrow.span("week", bounds="()", exact=True, count=2)
assert floor == datetime(2013, 2, 15, 3, 41, 22, 8924, tzinfo=tz.tzutc())
assert ceil == datetime(2013, 3, 1, 3, 41, 22, 8922, tzinfo=tz.tzutc())
@pytest.mark.usefixtures("time_2013_01_01")
class TestArrowHumanize:
def test_granularity(self):
assert self.now.humanize(granularity="second") == "just now"
later1 = self.now.shift(seconds=1)
assert self.now.humanize(later1, granularity="second") == "just now"
assert later1.humanize(self.now, granularity="second") == "just now"
assert self.now.humanize(later1, granularity="minute") == "0 minutes ago"
assert later1.humanize(self.now, granularity="minute") == "in 0 minutes"
later100 = self.now.shift(seconds=100)
assert self.now.humanize(later100, granularity="second") == "100 seconds ago"
assert later100.humanize(self.now, granularity="second") == "in 100 seconds"
assert self.now.humanize(later100, granularity="minute") == "a minute ago"
assert later100.humanize(self.now, granularity="minute") == "in a minute"
assert self.now.humanize(later100, granularity="hour") == "0 hours ago"
assert later100.humanize(self.now, granularity="hour") == "in 0 hours"
later4000 = self.now.shift(seconds=4000)
assert self.now.humanize(later4000, granularity="minute") == "66 minutes ago"
assert later4000.humanize(self.now, granularity="minute") == "in 66 minutes"
assert self.now.humanize(later4000, granularity="hour") == "an hour ago"
assert later4000.humanize(self.now, granularity="hour") == "in an hour"
assert self.now.humanize(later4000, granularity="day") == "0 days ago"
assert later4000.humanize(self.now, granularity="day") == "in 0 days"
later105 = self.now.shift(seconds=10 ** 5)
assert self.now.humanize(later105, granularity="hour") == "27 hours ago"
assert later105.humanize(self.now, granularity="hour") == "in 27 hours"
assert self.now.humanize(later105, granularity="day") == "a day ago"
assert later105.humanize(self.now, granularity="day") == "in a day"
assert self.now.humanize(later105, granularity="week") == "0 weeks ago"
assert later105.humanize(self.now, granularity="week") == "in 0 weeks"
assert self.now.humanize(later105, granularity="month") == "0 months ago"
assert later105.humanize(self.now, granularity="month") == "in 0 months"
assert self.now.humanize(later105, granularity=["month"]) == "0 months ago"
assert later105.humanize(self.now, granularity=["month"]) == "in 0 months"
later106 = self.now.shift(seconds=3 * 10 ** 6)
assert self.now.humanize(later106, granularity="day") == "34 days ago"
assert later106.humanize(self.now, granularity="day") == "in 34 days"
assert self.now.humanize(later106, granularity="week") == "4 weeks ago"
assert later106.humanize(self.now, granularity="week") == "in 4 weeks"
assert self.now.humanize(later106, granularity="month") == "a month ago"
assert later106.humanize(self.now, granularity="month") == "in a month"
assert self.now.humanize(later106, granularity="year") == "0 years ago"
assert later106.humanize(self.now, granularity="year") == "in 0 years"
later506 = self.now.shift(seconds=50 * 10 ** 6)
assert self.now.humanize(later506, granularity="week") == "82 weeks ago"
assert later506.humanize(self.now, granularity="week") == "in 82 weeks"
assert self.now.humanize(later506, granularity="month") == "18 months ago"
assert later506.humanize(self.now, granularity="month") == "in 18 months"
assert self.now.humanize(later506, granularity="year") == "a year ago"
assert later506.humanize(self.now, granularity="year") == "in a year"
later108 = self.now.shift(seconds=10 ** 8)
assert self.now.humanize(later108, granularity="year") == "3 years ago"
assert later108.humanize(self.now, granularity="year") == "in 3 years"
later108onlydistance = self.now.shift(seconds=10 ** 8)
assert (
self.now.humanize(
later108onlydistance, only_distance=True, granularity="year"
)
== "3 years"
)
assert (
later108onlydistance.humanize(
self.now, only_distance=True, granularity="year"
)
== "3 years"
)
with pytest.raises(ValueError):
self.now.humanize(later108, granularity="years")
def test_multiple_granularity(self):
assert self.now.humanize(granularity="second") == "just now"
assert self.now.humanize(granularity=["second"]) == "just now"
assert (
self.now.humanize(granularity=["year", "month", "day", "hour", "second"])
== "in 0 years 0 months 0 days 0 hours and 0 seconds"
)
later4000 = self.now.shift(seconds=4000)
assert (
later4000.humanize(self.now, granularity=["hour", "minute"])
== "in an hour and 6 minutes"
)
assert (
self.now.humanize(later4000, granularity=["hour", "minute"])
== "an hour and 6 minutes ago"
)
assert (
later4000.humanize(
self.now, granularity=["hour", "minute"], only_distance=True
)
== "an hour and 6 minutes"
)
assert (
later4000.humanize(self.now, granularity=["day", "hour", "minute"])
== "in 0 days an hour and 6 minutes"
)
assert (
self.now.humanize(later4000, granularity=["day", "hour", "minute"])
== "0 days an hour and 6 minutes ago"
)
later105 = self.now.shift(seconds=10 ** 5)
assert (
self.now.humanize(later105, granularity=["hour", "day", "minute"])
== "a day 3 hours and 46 minutes ago"
)
with pytest.raises(ValueError):
self.now.humanize(later105, granularity=["error", "second"])
later108onlydistance = self.now.shift(seconds=10 ** 8)
assert (
self.now.humanize(
later108onlydistance, only_distance=True, granularity=["year"]
)
== "3 years"
)
assert (
self.now.humanize(
later108onlydistance, only_distance=True, granularity=["month", "week"]
)
== "37 months and 4 weeks"
)
# this will change when leap years are implemented
assert (
self.now.humanize(
later108onlydistance, only_distance=True, granularity=["year", "second"]
)
== "3 years and 5392000 seconds"
)
one_min_one_sec_ago = self.now.shift(minutes=-1, seconds=-1)
assert (
one_min_one_sec_ago.humanize(self.now, granularity=["minute", "second"])
== "a minute and a second ago"
)
one_min_two_secs_ago = self.now.shift(minutes=-1, seconds=-2)
assert (
one_min_two_secs_ago.humanize(self.now, granularity=["minute", "second"])
== "a minute and 2 seconds ago"
)
def test_seconds(self):
later = self.now.shift(seconds=10)
# regression test for issue #727
assert self.now.humanize(later) == "10 seconds ago"
assert later.humanize(self.now) == "in 10 seconds"
assert self.now.humanize(later, only_distance=True) == "10 seconds"
assert later.humanize(self.now, only_distance=True) == "10 seconds"
def test_minute(self):
later = self.now.shift(minutes=1)
assert self.now.humanize(later) == "a minute ago"
assert later.humanize(self.now) == "in a minute"
assert self.now.humanize(later, only_distance=True) == "a minute"
assert later.humanize(self.now, only_distance=True) == "a minute"
def test_minutes(self):
later = self.now.shift(minutes=2)
assert self.now.humanize(later) == "2 minutes ago"
assert later.humanize(self.now) == "in 2 minutes"
assert self.now.humanize(later, only_distance=True) == "2 minutes"
assert later.humanize(self.now, only_distance=True) == "2 minutes"
def test_hour(self):
later = self.now.shift(hours=1)
assert self.now.humanize(later) == "an hour ago"
assert later.humanize(self.now) == "in an hour"
assert self.now.humanize(later, only_distance=True) == "an hour"
assert later.humanize(self.now, only_distance=True) == "an hour"
def test_hours(self):
later = self.now.shift(hours=2)
assert self.now.humanize(later) == "2 hours ago"
assert later.humanize(self.now) == "in 2 hours"
assert self.now.humanize(later, only_distance=True) == "2 hours"
assert later.humanize(self.now, only_distance=True) == "2 hours"
def test_day(self):
later = self.now.shift(days=1)
assert self.now.humanize(later) == "a day ago"
assert later.humanize(self.now) == "in a day"
# regression test for issue #697
less_than_48_hours = self.now.shift(
days=1, hours=23, seconds=59, microseconds=999999
)
assert self.now.humanize(less_than_48_hours) == "a day ago"
assert less_than_48_hours.humanize(self.now) == "in a day"
less_than_48_hours_date = less_than_48_hours._datetime.date()
with pytest.raises(TypeError):
            # humanize's "other" argument does not accept raw datetime.date objects
self.now.humanize(less_than_48_hours_date)
assert self.now.humanize(later, only_distance=True) == "a day"
assert later.humanize(self.now, only_distance=True) == "a day"
def test_days(self):
later = self.now.shift(days=2)
assert self.now.humanize(later) == "2 days ago"
assert later.humanize(self.now) == "in 2 days"
assert self.now.humanize(later, only_distance=True) == "2 days"
assert later.humanize(self.now, only_distance=True) == "2 days"
# Regression tests for humanize bug referenced in issue 541
later = self.now.shift(days=3)
assert later.humanize(self.now) == "in 3 days"
later = self.now.shift(days=3, seconds=1)
assert later.humanize(self.now) == "in 3 days"
later = self.now.shift(days=4)
assert later.humanize(self.now) == "in 4 days"
def test_week(self):
later = self.now.shift(weeks=1)
assert self.now.humanize(later) == "a week ago"
assert later.humanize(self.now) == "in a week"
assert self.now.humanize(later, only_distance=True) == "a week"
assert later.humanize(self.now, only_distance=True) == "a week"
def test_weeks(self):
later = self.now.shift(weeks=2)
assert self.now.humanize(later) == "2 weeks ago"
assert later.humanize(self.now) == "in 2 weeks"
assert self.now.humanize(later, only_distance=True) == "2 weeks"
assert later.humanize(self.now, only_distance=True) == "2 weeks"
@pytest.mark.xfail(reason="known issue with humanize month limits")
def test_month(self):
later = self.now.shift(months=1)
        # TODO: this currently returns "4 weeks ago"; fix it to be correct on a per-month basis
assert self.now.humanize(later) == "a month ago"
assert later.humanize(self.now) == "in a month"
assert self.now.humanize(later, only_distance=True) == "a month"
assert later.humanize(self.now, only_distance=True) == "a month"
def test_month_plus_4_days(self):
# TODO needed for coverage, remove when month limits are fixed
later = self.now.shift(months=1, days=4)
assert self.now.humanize(later) == "a month ago"
assert later.humanize(self.now) == "in a month"
@pytest.mark.xfail(reason="known issue with humanize month limits")
def test_months(self):
later = self.now.shift(months=2)
earlier = self.now.shift(months=-2)
assert earlier.humanize(self.now) == "2 months ago"
assert later.humanize(self.now) == "in 2 months"
assert self.now.humanize(later, only_distance=True) == "2 months"
assert later.humanize(self.now, only_distance=True) == "2 months"
def test_year(self):
later = self.now.shift(years=1)
assert self.now.humanize(later) == "a year ago"
assert later.humanize(self.now) == "in a year"
assert self.now.humanize(later, only_distance=True) == "a year"
assert later.humanize(self.now, only_distance=True) == "a year"
def test_years(self):
later = self.now.shift(years=2)
assert self.now.humanize(later) == "2 years ago"
assert later.humanize(self.now) == "in 2 years"
assert self.now.humanize(later, only_distance=True) == "2 years"
assert later.humanize(self.now, only_distance=True) == "2 years"
arw = arrow.Arrow(2014, 7, 2)
result = arw.humanize(self.datetime)
assert result == "in a year"
def test_arrow(self):
arw = arrow.Arrow.fromdatetime(self.datetime)
result = arw.humanize(arrow.Arrow.fromdatetime(self.datetime))
assert result == "just now"
def test_datetime_tzinfo(self):
arw = arrow.Arrow.fromdatetime(self.datetime)
result = arw.humanize(self.datetime.replace(tzinfo=tz.tzutc()))
assert result == "just now"
def test_other(self):
arw = arrow.Arrow.fromdatetime(self.datetime)
with pytest.raises(TypeError):
arw.humanize(object())
def test_invalid_locale(self):
arw = arrow.Arrow.fromdatetime(self.datetime)
with pytest.raises(ValueError):
arw.humanize(locale="klingon")
def test_none(self):
arw = arrow.Arrow.utcnow()
result = arw.humanize()
assert result == "just now"
result = arw.humanize(None)
assert result == "just now"
def test_week_limit(self):
# regression test for issue #848
arw = arrow.Arrow.utcnow()
later = arw.shift(weeks=+1)
result = arw.humanize(later)
assert result == "a week ago"
def test_untranslated_granularity(self, mocker):
arw = arrow.Arrow.utcnow()
later = arw.shift(weeks=1)
# simulate an untranslated timeframe key
mocker.patch.dict("arrow.locales.EnglishLocale.timeframes")
del arrow.locales.EnglishLocale.timeframes["week"]
with pytest.raises(ValueError):
arw.humanize(later, granularity="week")
    # Bulgarian is an example of a language that overrides _format_timeframe.
    # Applicable to all locales. Note: contributors who override describe or
    # describe_multi must make sure that delta is truncated on call.
def test_no_floats(self):
arw = arrow.Arrow(2013, 1, 1, 0, 0, 0)
later = arw.shift(seconds=55000)
humanize_string = arw.humanize(later, locale="bg", granularity="minute")
assert humanize_string == "916 минути назад"
def test_no_floats_multi_gran(self):
arw = arrow.Arrow(2013, 1, 1, 0, 0, 0)
later = arw.shift(seconds=55000)
humanize_string = arw.humanize(
later, locale="bg", granularity=["second", "minute"]
)
assert humanize_string == "916 минути 40 няколко секунди назад"
@pytest.mark.usefixtures("time_2013_01_01")
class TestArrowHumanizeTestsWithLocale:
def test_now(self):
arw = arrow.Arrow(2013, 1, 1, 0, 0, 0)
result = arw.humanize(self.datetime, locale="ru")
assert result == "сейчас"
def test_seconds(self):
arw = arrow.Arrow(2013, 1, 1, 0, 0, 44)
result = arw.humanize(self.datetime, locale="ru")
assert result == "через 44 несколько секунд"
def test_years(self):
arw = arrow.Arrow(2011, 7, 2)
result = arw.humanize(self.datetime, locale="ru")
assert result == "год назад"
# Fixtures for Dehumanize
@pytest.fixture(scope="class")
def locale_list_no_weeks():
tested_langs = [
"en",
"en-us",
"en-gb",
"en-au",
"en-be",
"en-jp",
"en-za",
"en-ca",
"en-ph",
"fr",
"fr-fr",
"fr-ca",
"it",
"it-it",
"es",
"es-es",
"el",
"el-gr",
"ja",
"ja-jp",
"sv",
"sv-se",
"fi",
"fi-fi",
"zh",
"zh-cn",
"zh-tw",
"zh-hk",
"nl",
"nl-nl",
"af",
"de",
"de-de",
"de-ch",
"de-at",
"nb",
"nb-no",
"nn",
"nn-no",
"pt",
"pt-pt",
"pt_br",
"tl",
"tl-ph",
"vi",
"vi-vn",
"tr",
"tr-tr",
"az",
"az-az",
"da",
"da-dk",
"ml",
"hi",
"fa",
"fa-ir",
"mr",
"ca",
"ca-es",
"ca-ad",
"ca-fr",
"ca-it",
"eo",
"eo-xx",
"bn",
"bn-bd",
"bn-in",
"rm",
"rm-ch",
"ro",
"ro-ro",
"sl",
"sl-si",
"id",
"id-id",
"ne",
"ne-np",
"ee",
"et",
"sw",
"sw-ke",
"sw-tz",
"la",
"la-va",
"lt",
"lt-lt",
"ms",
"ms-my",
"ms-bn",
"or",
"or-in",
"se",
"se-fi",
"se-no",
"se-se",
"lb",
"lb-lu",
"zu",
"zu-za",
"sq",
"sq-al",
"ta",
"ta-in",
"ta-lk",
"ur",
"ur-pk",
]
return tested_langs
@pytest.fixture(scope="class")
def locale_list_with_weeks():
tested_langs = [
"en",
"en-us",
"en-gb",
"en-au",
"en-be",
"en-jp",
"en-za",
"en-ca",
"en-ph",
"fr",
"fr-fr",
"fr-ca",
"it",
"it-it",
"es",
"es-es",
"ja",
"ja-jp",
"sv",
"sv-se",
"zh",
"zh-cn",
"zh-tw",
"zh-hk",
"nl",
"nl-nl",
"de",
"de-de",
"de-ch",
"de-at",
"pt",
"pt-pt",
"pt-br",
"tl",
"tl-ph",
"vi",
"vi-vn",
"sw",
"sw-ke",
"sw-tz",
"la",
"la-va",
"lt",
"lt-lt",
"ms",
"ms-my",
"ms-bn",
"lb",
"lb-lu",
"zu",
"zu-za",
"ta",
"ta-in",
"ta-lk",
]
return tested_langs
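# Round-trip sketch of the behaviour exercised below (values illustrative):
# humanize() renders a relative string and dehumanize() parses it back.
#   arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
#   past = arw.shift(seconds=-5)
#   s = past.humanize(arw, locale="en", granularity=["second"])  # "5 seconds ago"
#   assert arw.dehumanize(s, locale="en") == past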
class TestArrowDehumanize:
def test_now(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
second_ago = arw.shift(seconds=-1)
second_future = arw.shift(seconds=1)
second_ago_string = second_ago.humanize(
arw, locale=lang, granularity=["second"]
)
second_future_string = second_future.humanize(
arw, locale=lang, granularity=["second"]
)
assert arw.dehumanize(second_ago_string, locale=lang) == arw
assert arw.dehumanize(second_future_string, locale=lang) == arw
def test_seconds(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
second_ago = arw.shift(seconds=-5)
second_future = arw.shift(seconds=5)
second_ago_string = second_ago.humanize(
arw, locale=lang, granularity=["second"]
)
second_future_string = second_future.humanize(
arw, locale=lang, granularity=["second"]
)
assert arw.dehumanize(second_ago_string, locale=lang) == second_ago
assert arw.dehumanize(second_future_string, locale=lang) == second_future
def test_minute(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2001, 6, 18, 5, 55, 0)
minute_ago = arw.shift(minutes=-1)
minute_future = arw.shift(minutes=1)
minute_ago_string = minute_ago.humanize(
arw, locale=lang, granularity=["minute"]
)
minute_future_string = minute_future.humanize(
arw, locale=lang, granularity=["minute"]
)
assert arw.dehumanize(minute_ago_string, locale=lang) == minute_ago
assert arw.dehumanize(minute_future_string, locale=lang) == minute_future
def test_minutes(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2007, 1, 10, 5, 55, 0)
minute_ago = arw.shift(minutes=-5)
minute_future = arw.shift(minutes=5)
minute_ago_string = minute_ago.humanize(
arw, locale=lang, granularity=["minute"]
)
minute_future_string = minute_future.humanize(
arw, locale=lang, granularity=["minute"]
)
assert arw.dehumanize(minute_ago_string, locale=lang) == minute_ago
assert arw.dehumanize(minute_future_string, locale=lang) == minute_future
def test_hour(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2009, 4, 20, 5, 55, 0)
hour_ago = arw.shift(hours=-1)
hour_future = arw.shift(hours=1)
hour_ago_string = hour_ago.humanize(arw, locale=lang, granularity=["hour"])
hour_future_string = hour_future.humanize(
arw, locale=lang, granularity=["hour"]
)
assert arw.dehumanize(hour_ago_string, locale=lang) == hour_ago
assert arw.dehumanize(hour_future_string, locale=lang) == hour_future
def test_hours(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2010, 2, 16, 7, 55, 0)
hour_ago = arw.shift(hours=-3)
hour_future = arw.shift(hours=3)
hour_ago_string = hour_ago.humanize(arw, locale=lang, granularity=["hour"])
hour_future_string = hour_future.humanize(
arw, locale=lang, granularity=["hour"]
)
assert arw.dehumanize(hour_ago_string, locale=lang) == hour_ago
assert arw.dehumanize(hour_future_string, locale=lang) == hour_future
def test_week(self, locale_list_with_weeks):
for lang in locale_list_with_weeks:
arw = arrow.Arrow(2012, 2, 18, 1, 52, 0)
week_ago = arw.shift(weeks=-1)
week_future = arw.shift(weeks=1)
week_ago_string = week_ago.humanize(arw, locale=lang, granularity=["week"])
week_future_string = week_future.humanize(
arw, locale=lang, granularity=["week"]
)
assert arw.dehumanize(week_ago_string, locale=lang) == week_ago
assert arw.dehumanize(week_future_string, locale=lang) == week_future
def test_weeks(self, locale_list_with_weeks):
for lang in locale_list_with_weeks:
arw = arrow.Arrow(2020, 3, 18, 5, 3, 0)
week_ago = arw.shift(weeks=-7)
week_future = arw.shift(weeks=7)
week_ago_string = week_ago.humanize(arw, locale=lang, granularity=["week"])
week_future_string = week_future.humanize(
arw, locale=lang, granularity=["week"]
)
assert arw.dehumanize(week_ago_string, locale=lang) == week_ago
assert arw.dehumanize(week_future_string, locale=lang) == week_future
def test_year(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
year_ago = arw.shift(years=-1)
year_future = arw.shift(years=1)
year_ago_string = year_ago.humanize(arw, locale=lang, granularity=["year"])
year_future_string = year_future.humanize(
arw, locale=lang, granularity=["year"]
)
assert arw.dehumanize(year_ago_string, locale=lang) == year_ago
assert arw.dehumanize(year_future_string, locale=lang) == year_future
def test_years(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
year_ago = arw.shift(years=-10)
year_future = arw.shift(years=10)
year_ago_string = year_ago.humanize(arw, locale=lang, granularity=["year"])
year_future_string = year_future.humanize(
arw, locale=lang, granularity=["year"]
)
assert arw.dehumanize(year_ago_string, locale=lang) == year_ago
assert arw.dehumanize(year_future_string, locale=lang) == year_future
def test_gt_than_10_years(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
year_ago = arw.shift(years=-25)
year_future = arw.shift(years=25)
year_ago_string = year_ago.humanize(arw, locale=lang, granularity=["year"])
year_future_string = year_future.humanize(
arw, locale=lang, granularity=["year"]
)
assert arw.dehumanize(year_ago_string, locale=lang) == year_ago
assert arw.dehumanize(year_future_string, locale=lang) == year_future
def test_mixed_granularity(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
past = arw.shift(hours=-1, minutes=-1, seconds=-1)
future = arw.shift(hours=1, minutes=1, seconds=1)
past_string = past.humanize(
arw, locale=lang, granularity=["hour", "minute", "second"]
)
future_string = future.humanize(
arw, locale=lang, granularity=["hour", "minute", "second"]
)
assert arw.dehumanize(past_string, locale=lang) == past
assert arw.dehumanize(future_string, locale=lang) == future
def test_mixed_granularity_hours(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
past = arw.shift(hours=-3, minutes=-1, seconds=-15)
future = arw.shift(hours=3, minutes=1, seconds=15)
past_string = past.humanize(
arw, locale=lang, granularity=["hour", "minute", "second"]
)
future_string = future.humanize(
arw, locale=lang, granularity=["hour", "minute", "second"]
)
assert arw.dehumanize(past_string, locale=lang) == past
assert arw.dehumanize(future_string, locale=lang) == future
def test_mixed_granularity_day(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
past = arw.shift(days=-3, minutes=-1, seconds=-15)
future = arw.shift(days=3, minutes=1, seconds=15)
past_string = past.humanize(
arw, locale=lang, granularity=["day", "minute", "second"]
)
future_string = future.humanize(
arw, locale=lang, granularity=["day", "minute", "second"]
)
assert arw.dehumanize(past_string, locale=lang) == past
assert arw.dehumanize(future_string, locale=lang) == future
def test_mixed_granularity_day_hour(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 1, 10, 5, 55, 0)
past = arw.shift(days=-3, hours=-23, seconds=-15)
future = arw.shift(days=3, hours=23, seconds=15)
past_string = past.humanize(
arw, locale=lang, granularity=["day", "hour", "second"]
)
future_string = future.humanize(
arw, locale=lang, granularity=["day", "hour", "second"]
)
assert arw.dehumanize(past_string, locale=lang) == past
assert arw.dehumanize(future_string, locale=lang) == future
# Test to make sure unsupported locales error out
def test_unsupported_locale(self):
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
second_ago = arw.shift(seconds=-5)
second_future = arw.shift(seconds=5)
second_ago_string = second_ago.humanize(
arw, locale="ko", granularity=["second"]
)
second_future_string = second_future.humanize(
arw, locale="ko", granularity=["second"]
)
# ko is an example of many unsupported locales currently
with pytest.raises(ValueError):
arw.dehumanize(second_ago_string, locale="ko")
with pytest.raises(ValueError):
arw.dehumanize(second_future_string, locale="ko")
# Test to ensure old style locale strings are supported
def test_normalized_locale(self):
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
second_ago = arw.shift(seconds=-5)
second_future = arw.shift(seconds=5)
second_ago_string = second_ago.humanize(
arw, locale="zh_hk", granularity=["second"]
)
second_future_string = second_future.humanize(
arw, locale="zh_hk", granularity=["second"]
)
assert arw.dehumanize(second_ago_string, locale="zh_hk") == second_ago
assert arw.dehumanize(second_future_string, locale="zh_hk") == second_future
# Ensures relative units are required in string
def test_require_relative_unit(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
second_ago = arw.shift(seconds=-5)
second_future = arw.shift(seconds=5)
second_ago_string = second_ago.humanize(
arw, locale=lang, granularity=["second"], only_distance=True
)
second_future_string = second_future.humanize(
arw, locale=lang, granularity=["second"], only_distance=True
)
with pytest.raises(ValueError):
arw.dehumanize(second_ago_string, locale=lang)
with pytest.raises(ValueError):
arw.dehumanize(second_future_string, locale=lang)
# Test for scrambled input
def test_scrambled_input(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
second_ago = arw.shift(seconds=-5)
second_future = arw.shift(seconds=5)
second_ago_string = second_ago.humanize(
arw, locale=lang, granularity=["second"], only_distance=True
)
second_future_string = second_future.humanize(
arw, locale=lang, granularity=["second"], only_distance=True
)
            # Scramble the input by sorting its characters
second_ago_presort = sorted(second_ago_string)
second_ago_string = "".join(second_ago_presort)
second_future_presort = sorted(second_future_string)
second_future_string = "".join(second_future_presort)
with pytest.raises(ValueError):
arw.dehumanize(second_ago_string, locale=lang)
with pytest.raises(ValueError):
arw.dehumanize(second_future_string, locale=lang)
def test_no_units_modified(self, locale_list_no_weeks):
for lang in locale_list_no_weeks:
arw = arrow.Arrow(2000, 6, 18, 5, 55, 0)
# Ensures we pass the first stage of checking whether relative units exist
locale_obj = locales.get_locale(lang)
empty_past_string = locale_obj.past
empty_future_string = locale_obj.future
with pytest.raises(ValueError):
arw.dehumanize(empty_past_string, locale=lang)
with pytest.raises(ValueError):
arw.dehumanize(empty_future_string, locale=lang)
class TestArrowIsBetween:
def test_start_before_end(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 8))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5))
assert not target.is_between(start, end)
def test_exclusive_exclusive_bounds(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 27))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 10))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 5, 12, 30, 36))
assert target.is_between(start, end, "()")
def test_exclusive_exclusive_bounds_same_date(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
assert not target.is_between(start, end, "()")
def test_inclusive_exclusive_bounds(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 6))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 4))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 6))
assert not target.is_between(start, end, "[)")
def test_exclusive_inclusive_bounds(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
assert target.is_between(start, end, "(]")
def test_inclusive_inclusive_bounds_same_date(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
assert target.is_between(start, end, "[]")
def test_inclusive_inclusive_bounds_target_before_start(self):
target = arrow.Arrow.fromdatetime(datetime(2020, 12, 24))
start = arrow.Arrow.fromdatetime(datetime(2020, 12, 25))
end = arrow.Arrow.fromdatetime(datetime(2020, 12, 26))
assert not target.is_between(start, end, "[]")
def test_type_error_exception(self):
with pytest.raises(TypeError):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = datetime(2013, 5, 5)
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8))
target.is_between(start, end)
with pytest.raises(TypeError):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5))
end = datetime(2013, 5, 8)
target.is_between(start, end)
with pytest.raises(TypeError):
target.is_between(None, None)
def test_value_error_exception(self):
target = arrow.Arrow.fromdatetime(datetime(2013, 5, 7))
start = arrow.Arrow.fromdatetime(datetime(2013, 5, 5))
end = arrow.Arrow.fromdatetime(datetime(2013, 5, 8))
with pytest.raises(ValueError):
target.is_between(start, end, "][")
with pytest.raises(ValueError):
target.is_between(start, end, "")
with pytest.raises(ValueError):
target.is_between(start, end, "]")
with pytest.raises(ValueError):
target.is_between(start, end, "[")
with pytest.raises(ValueError):
target.is_between(start, end, "hello")
with pytest.raises(ValueError):
target.span("week", week_start=55)
class TestArrowUtil:
def test_get_datetime(self):
get_datetime = arrow.Arrow._get_datetime
arw = arrow.Arrow.utcnow()
dt = datetime.utcnow()
timestamp = time.time()
assert get_datetime(arw) == arw.datetime
assert get_datetime(dt) == dt
assert (
get_datetime(timestamp) == arrow.Arrow.utcfromtimestamp(timestamp).datetime
)
with pytest.raises(ValueError) as raise_ctx:
get_datetime("abc")
assert "not recognized as a datetime or timestamp" in str(raise_ctx.value)
def test_get_tzinfo(self):
get_tzinfo = arrow.Arrow._get_tzinfo
with pytest.raises(ValueError) as raise_ctx:
get_tzinfo("abc")
assert "not recognized as a timezone" in str(raise_ctx.value)
def test_get_iteration_params(self):
assert arrow.Arrow._get_iteration_params("end", None) == ("end", sys.maxsize)
assert arrow.Arrow._get_iteration_params(None, 100) == (arrow.Arrow.max, 100)
assert arrow.Arrow._get_iteration_params(100, 120) == (100, 120)
with pytest.raises(ValueError):
arrow.Arrow._get_iteration_params(None, None)
|
import pytest
def test_choice(serializer):
    from abstract_open_traffic_generator.flow import Flow, TxRx
    # A bare TxRx() has no choice assigned, so constructing the Flow
    # should raise a TypeError (the original truthy-string assert could
    # never fail; pytest.raises expresses the intent)
    with pytest.raises(TypeError):
        Flow(name='test', tx_rx=TxRx())
def test_string(serializer):
    from abstract_open_traffic_generator.port import Port
    # name must be a string, so a numeric name should raise a TypeError
    with pytest.raises(TypeError):
        Port(name=1)
if __name__ == '__main__':
pytest.main(['-s', __file__])
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class Artifact(Model):
"""
:param name: The name of the artifact.
:type name: str
:param signed_content: Signed url for downloading this artifact
:type signed_content: :class:`SignedUrl <azure.devops.v6_0.pipelines.models.SignedUrl>`
:param url: Self-referential url
:type url: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'signed_content': {'key': 'signedContent', 'type': 'SignedUrl'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, name=None, signed_content=None, url=None):
super(Artifact, self).__init__()
self.name = name
self.signed_content = signed_content
self.url = url
class CreatePipelineConfigurationParameters(Model):
"""
:param type:
:type type: object
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, type=None):
super(CreatePipelineConfigurationParameters, self).__init__()
self.type = type
class CreatePipelineParameters(Model):
"""
:param configuration:
:type configuration: :class:`CreatePipelineConfigurationParameters <azure.devops.v6_0.pipelines.models.CreatePipelineConfigurationParameters>`
:param folder:
:type folder: str
:param name:
:type name: str
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': 'CreatePipelineConfigurationParameters'},
'folder': {'key': 'folder', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, configuration=None, folder=None, name=None):
super(CreatePipelineParameters, self).__init__()
self.configuration = configuration
self.folder = folder
self.name = name
class Log(Model):
"""
:param created_on: The date and time the log was created.
:type created_on: datetime
:param id: The ID of the log.
:type id: int
:param last_changed_on: The date and time the log was last changed.
:type last_changed_on: datetime
:param line_count: The number of lines in the log.
:type line_count: long
:param signed_content:
:type signed_content: :class:`SignedUrl <azure.devops.v6_0.pipelines.models.SignedUrl>`
:param url:
:type url: str
"""
_attribute_map = {
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'last_changed_on': {'key': 'lastChangedOn', 'type': 'iso-8601'},
'line_count': {'key': 'lineCount', 'type': 'long'},
'signed_content': {'key': 'signedContent', 'type': 'SignedUrl'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, created_on=None, id=None, last_changed_on=None, line_count=None, signed_content=None, url=None):
super(Log, self).__init__()
self.created_on = created_on
self.id = id
self.last_changed_on = last_changed_on
self.line_count = line_count
self.signed_content = signed_content
self.url = url
class LogCollection(Model):
"""
:param logs:
:type logs: list of :class:`Log <azure.devops.v6_0.pipelines.models.Log>`
:param signed_content:
:type signed_content: :class:`SignedUrl <azure.devops.v6_0.pipelines.models.SignedUrl>`
:param url:
:type url: str
"""
_attribute_map = {
'logs': {'key': 'logs', 'type': '[Log]'},
'signed_content': {'key': 'signedContent', 'type': 'SignedUrl'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, logs=None, signed_content=None, url=None):
super(LogCollection, self).__init__()
self.logs = logs
self.signed_content = signed_content
self.url = url
class PipelineBase(Model):
"""
:param folder:
:type folder: str
:param id:
:type id: int
:param name:
:type name: str
:param revision:
:type revision: int
"""
_attribute_map = {
'folder': {'key': 'folder', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, folder=None, id=None, name=None, revision=None):
super(PipelineBase, self).__init__()
self.folder = folder
self.id = id
self.name = name
self.revision = revision
class PipelineConfiguration(Model):
"""
:param type:
:type type: object
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, type=None):
super(PipelineConfiguration, self).__init__()
self.type = type
class PipelineReference(PipelineBase):
"""
A reference to a Pipeline.
:param folder:
:type folder: str
:param id:
:type id: int
:param name:
:type name: str
:param revision:
:type revision: int
:param url:
:type url: str
"""
_attribute_map = {
'folder': {'key': 'folder', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, folder=None, id=None, name=None, revision=None, url=None):
super(PipelineReference, self).__init__(folder=folder, id=id, name=name, revision=revision)
self.url = url
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class Repository(Model):
"""
:param type:
:type type: object
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, type=None):
super(Repository, self).__init__()
self.type = type
class RepositoryResource(Model):
"""
:param ref_name:
:type ref_name: str
:param repository:
:type repository: :class:`Repository <azure.devops.v6_0.pipelines.models.Repository>`
:param version:
:type version: str
"""
_attribute_map = {
'ref_name': {'key': 'refName', 'type': 'str'},
'repository': {'key': 'repository', 'type': 'Repository'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, ref_name=None, repository=None, version=None):
super(RepositoryResource, self).__init__()
self.ref_name = ref_name
self.repository = repository
self.version = version
class RepositoryResourceParameters(Model):
"""
:param ref_name:
:type ref_name: str
:param token: This is the security token to use when connecting to the repository.
:type token: str
:param token_type: Optional. This is the type of the token given. If not provided, a type of "Bearer" is assumed. Note: Use "Basic" for a PAT token.
:type token_type: str
:param version:
:type version: str
"""
_attribute_map = {
'ref_name': {'key': 'refName', 'type': 'str'},
'token': {'key': 'token', 'type': 'str'},
'token_type': {'key': 'tokenType', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, ref_name=None, token=None, token_type=None, version=None):
super(RepositoryResourceParameters, self).__init__()
self.ref_name = ref_name
self.token = token
self.token_type = token_type
self.version = version
class RunPipelineParameters(Model):
"""
:param resources:
:type resources: :class:`RunResourcesParameters <azure.devops.v6_0.pipelines.models.RunResourcesParameters>`
:param variables:
:type variables: dict
"""
_attribute_map = {
'resources': {'key': 'resources', 'type': 'RunResourcesParameters'},
'variables': {'key': 'variables', 'type': '{Variable}'}
}
def __init__(self, resources=None, variables=None):
super(RunPipelineParameters, self).__init__()
self.resources = resources
self.variables = variables
class RunReference(Model):
"""
:param id:
:type id: int
:param name:
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(RunReference, self).__init__()
self.id = id
self.name = name
class RunResources(Model):
"""
:param repositories:
:type repositories: dict
"""
_attribute_map = {
'repositories': {'key': 'repositories', 'type': '{RepositoryResource}'}
}
def __init__(self, repositories=None):
super(RunResources, self).__init__()
self.repositories = repositories
class RunResourcesParameters(Model):
"""
:param repositories:
:type repositories: dict
"""
_attribute_map = {
'repositories': {'key': 'repositories', 'type': '{RepositoryResourceParameters}'}
}
def __init__(self, repositories=None):
super(RunResourcesParameters, self).__init__()
self.repositories = repositories
class SignalRConnection(Model):
"""
:param signed_content:
:type signed_content: :class:`SignedUrl <azure.devops.v6_0.pipelines.models.SignedUrl>`
"""
_attribute_map = {
'signed_content': {'key': 'signedContent', 'type': 'SignedUrl'}
}
def __init__(self, signed_content=None):
super(SignalRConnection, self).__init__()
self.signed_content = signed_content
class SignedUrl(Model):
"""
A signed url allowing limited-time anonymous access to private resources.
:param signature_expires:
:type signature_expires: datetime
:param url:
:type url: str
"""
_attribute_map = {
'signature_expires': {'key': 'signatureExpires', 'type': 'iso-8601'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, signature_expires=None, url=None):
super(SignedUrl, self).__init__()
self.signature_expires = signature_expires
self.url = url
class Variable(Model):
"""
:param is_secret:
:type is_secret: bool
:param value:
:type value: str
"""
_attribute_map = {
'is_secret': {'key': 'isSecret', 'type': 'bool'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, is_secret=None, value=None):
super(Variable, self).__init__()
self.is_secret = is_secret
self.value = value
class Pipeline(PipelineBase):
"""
:param folder:
:type folder: str
:param id:
:type id: int
:param name:
:type name: str
:param revision:
:type revision: int
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v6_0.pipelines.models.ReferenceLinks>`
:param configuration:
:type configuration: :class:`PipelineConfiguration <azure.devops.v6_0.pipelines.models.PipelineConfiguration>`
:param url:
:type url: str
"""
_attribute_map = {
'folder': {'key': 'folder', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'configuration': {'key': 'configuration', 'type': 'PipelineConfiguration'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, folder=None, id=None, name=None, revision=None, _links=None, configuration=None, url=None):
super(Pipeline, self).__init__(folder=folder, id=id, name=name, revision=revision)
self._links = _links
self.configuration = configuration
self.url = url
class Run(RunReference):
"""
:param id:
:type id: int
:param name:
:type name: str
:param _links:
:type _links: :class:`ReferenceLinks <azure.devops.v6_0.pipelines.models.ReferenceLinks>`
:param created_date:
:type created_date: datetime
:param finished_date:
:type finished_date: datetime
:param pipeline:
:type pipeline: :class:`PipelineReference <azure.devops.v6_0.pipelines.models.PipelineReference>`
:param resources:
:type resources: :class:`RunResources <azure.devops.v6_0.pipelines.models.RunResources>`
:param result:
:type result: object
:param state:
:type state: object
:param url:
:type url: str
:param variables:
:type variables: dict
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'finished_date': {'key': 'finishedDate', 'type': 'iso-8601'},
'pipeline': {'key': 'pipeline', 'type': 'PipelineReference'},
'resources': {'key': 'resources', 'type': 'RunResources'},
'result': {'key': 'result', 'type': 'object'},
'state': {'key': 'state', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'variables': {'key': 'variables', 'type': '{Variable}'}
}
def __init__(self, id=None, name=None, _links=None, created_date=None, finished_date=None, pipeline=None, resources=None, result=None, state=None, url=None, variables=None):
super(Run, self).__init__(id=id, name=name)
self._links = _links
self.created_date = created_date
self.finished_date = finished_date
self.pipeline = pipeline
self.resources = resources
self.result = result
self.state = state
self.url = url
self.variables = variables
__all__ = [
'Artifact',
'CreatePipelineConfigurationParameters',
'CreatePipelineParameters',
'Log',
'LogCollection',
'PipelineBase',
'PipelineConfiguration',
'PipelineReference',
'ReferenceLinks',
'Repository',
'RepositoryResource',
'RepositoryResourceParameters',
'RunPipelineParameters',
'RunReference',
'RunResources',
'RunResourcesParameters',
'SignalRConnection',
'SignedUrl',
'Variable',
'Pipeline',
'Run',
]
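# Illustrative usage sketch (not part of the generated models above); the
# 'self' repository key, branch name, and variable values are assumptions.
#   params = RunPipelineParameters(
#       resources=RunResourcesParameters(
#           repositories={'self': RepositoryResourceParameters(ref_name='refs/heads/main')}
#       ),
#       variables={'config': Variable(is_secret=False, value='release')},
#   )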
|
from mitmproxy.proxy import context
from mitmproxy.test import tflow, taddons
def test_context():
with taddons.context() as tctx:
c = context.Context(
tflow.tclient_conn(),
tctx.options
)
assert repr(c)
c.layers.append(1)
assert repr(c)
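        # fork() copies the context, including its layers list, so the
        # appends to c and c2 below diverge independently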
c2 = c.fork()
c.layers.append(2)
c2.layers.append(3)
assert c.layers == [1, 2]
assert c2.layers == [1, 3]
|
"""
Hand class tests.
"""
#==================================================================================================#
# Libraries used:
import unittest
from PokerHand import PokerHand
CARDS: str = "KS 2H 5C JD TD"
hand2: str = "9C 9H 5C 5H AC"
class TestPokerHandCompareWith(unittest.TestCase):
def setUp(self):
"""Set up test fixtures."""
self.handTest = PokerHand(hand=CARDS)
self.handTest2 = PokerHand(hand=hand2)
#=================================================================================#
def test_PokerHand_compare_with_returns_a_bool(self):
"""Whether compare_with returns a bool"""
self.assertIsInstance(self.handTest.compare_with(self.handTest2), bool)
def test_PokerHand_compare_with_none_cards(self):
"""Whether compare_with is empty"""
self.assertIsNotNone(self.handTest.compare_with(self.handTest2), "Has no pair to check!")
#=================================================================================#
# More tests....
    def test_final_tests(self):
        """Final tests"""
        # ==> The comparison below is incorrect, because Ace > King:
# self.assertTrue(PokerHand("TC TH 5C 5H KH").compare_with(PokerHand("9C 9H 5C 5H AC")) == True)
self.assertTrue(PokerHand("TC TH 5C 5H KH").compare_with(PokerHand("9C 9H 5C 5H AC")) == False)
self.assertTrue(PokerHand("TS TD KC JC 7C").compare_with(PokerHand("JS JC AS KC TD")) == False)
self.assertTrue(PokerHand("7H 7C QC JS TS").compare_with(PokerHand("7D 7C JS TS 6D")) == True)
self.assertTrue(PokerHand("5S 5D 8C 7S 6H").compare_with(PokerHand("7D 7S 5S 5D JS")) == False)
self.assertTrue(PokerHand("AS AD KD 7C 3D").compare_with(PokerHand("AD AH KD 7C 4S")) == False)
self.assertTrue(PokerHand("TS JS QS KS AS").compare_with(PokerHand("AC AH AS AS KS")) == True)
self.assertTrue(PokerHand("TS JS QS KS AS").compare_with(PokerHand("TC JS QC KS AC")) == True)
self.assertTrue(PokerHand("TS JS QS KS AS").compare_with(PokerHand("QH QS QC AS 8H")) == True)
self.assertTrue(PokerHand("AC AH AS AS KS").compare_with(PokerHand("TC JS QC KS AC")) == True)
self.assertTrue(PokerHand("AC AH AS AS KS").compare_with(PokerHand("QH QS QC AS 8H")) == True)
self.assertTrue(PokerHand("TC JS QC KS AC").compare_with(PokerHand("QH QS QC AS 8H")) == True)
self.assertTrue(PokerHand("7H 8H 9H TH JH").compare_with(PokerHand("JH JC JS JD TH")) == True)
self.assertTrue(PokerHand("7H 8H 9H TH JH").compare_with(PokerHand("4H 5H 9H TH JH")) == True)
self.assertTrue(PokerHand("7H 8H 9H TH JH").compare_with(PokerHand("7C 8S 9H TH JH")) == True)
self.assertTrue(PokerHand("7H 8H 9H TH JH").compare_with(PokerHand("TS TH TD JH JD")) == True)
self.assertTrue(PokerHand("7H 8H 9H TH JH").compare_with(PokerHand("JH JD TH TC 4C")) == True)
self.assertTrue(PokerHand("JH JC JS JD TH").compare_with(PokerHand("4H 5H 9H TH JH")) == True)
self.assertTrue(PokerHand("JH JC JS JD TH").compare_with(PokerHand("7C 8S 9H TH JH")) == True)
self.assertTrue(PokerHand("JH JC JS JD TH").compare_with(PokerHand("TS TH TD JH JD")) == True)
self.assertTrue(PokerHand("JH JC JS JD TH").compare_with(PokerHand("JH JD TH TC 4C")) == True)
self.assertTrue(PokerHand("4H 5H 9H TH JH").compare_with(PokerHand("7C 8S 9H TH JH")) == True)
self.assertTrue(PokerHand("4H 5H 9H TH JH").compare_with(PokerHand("TS TH TD JH JD")) == False)
self.assertTrue(PokerHand("4H 5H 9H TH JH").compare_with(PokerHand("JH JD TH TC 4C")) == True)
self.assertTrue(PokerHand("7C 8S 9H TH JH").compare_with(PokerHand("TS TH TD JH JD")) == False)
self.assertTrue(PokerHand("7C 8S 9H TH JH").compare_with(PokerHand("JH JD TH TC 4C")) == True)
self.assertTrue(PokerHand("TS TH TD JH JD").compare_with(PokerHand("JH JD TH TC 4C")) == True)
#=================================================================================#
if __name__ == "__main__":
unittest.main()
|
# -*- coding: utf-8 -*-
#
# Pyserini: Python interface to the Anserini IR toolkit built on Lucene
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from jnius import autoclass
import sys
args = sys.argv[1:]
JIndexCollection = autoclass('io.anserini.index.IndexCollection')
JIndexCollection.main(args)
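# Example invocation (arguments are passed straight through to Anserini's
# IndexCollection; the flag values and paths here are illustrative):
#   python index.py -collection JsonCollection -generator DefaultLuceneDocumentGenerator \
#       -input docs/ -index indexes/demo -threads 4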
|
#!/usr/bin/env python
import json
import yaml
import urllib
try:
    import urllib.request  # Python 3; Python 2 falls back to urllib.urlopen
except ImportError:
    pass
import os
import sys
from jsonref import JsonRef
import click

# Compatibility shim: basestring only exists on Python 2
try:
    string_types = basestring  # Python 2
except NameError:
    string_types = str  # Python 3
KINDS_WITH_JSONSCHEMA = [
"jsonschemaprops",
"jsonschemapropsorarray",
"customresourcevalidation",
"customresourcedefinition",
"customresourcedefinitionspec",
"customresourcedefinitionlist",
"customresourcedefinitionspec",
"customresourcedefinitionversion",
"jsonschemapropsorstringarray",
"jsonschemapropsorbool",
]
class UnsupportedError(Exception):
pass
def additional_properties(data):
"This recreates the behaviour of kubectl at https://github.com/kubernetes/kubernetes/blob/225b9119d6a8f03fcbe3cc3d590c261965d928d0/pkg/kubectl/validation/schema.go#L312"
new = {}
try:
        for k, v in data.items():
new_v = v
if isinstance(v, dict):
if "properties" in v:
if "additionalProperties" not in v:
v["additionalProperties"] = False
new_v = additional_properties(v)
else:
new_v = v
new[k] = new_v
return new
except AttributeError:
return data
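# For example (illustrative): a nested schema that declares "properties" but
# not "additionalProperties" gains "additionalProperties": False:
#   additional_properties({'spec': {'properties': {}}})
#   -> {'spec': {'properties': {}, 'additionalProperties': False}}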
def replace_int_or_string(data):
new = {}
try:
        for k, v in data.items():
new_v = v
if isinstance(v, dict):
if 'format' in v and v['format'] == 'int-or-string':
new_v = {'oneOf': [
{'type': 'string'},
{'type': 'integer'},
]}
else:
new_v = replace_int_or_string(v)
elif isinstance(v, list):
new_v = list()
for x in v:
new_v.append(replace_int_or_string(x))
else:
new_v = v
new[k] = new_v
return new
except AttributeError:
return data
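# For example (illustrative):
#   replace_int_or_string({'port': {'format': 'int-or-string'}})
#   -> {'port': {'oneOf': [{'type': 'string'}, {'type': 'integer'}]}}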
def allow_null_optional_fields(data, parent=None, grand_parent=None, key=None):
new = {}
try:
        for k, v in data.items():
new_v = v
if isinstance(v, dict):
new_v = allow_null_optional_fields(v, data, parent, k)
elif isinstance(v, list):
new_v = list()
for x in v:
new_v.append(allow_null_optional_fields(x, v, parent, k))
            elif isinstance(v, string_types):
is_array = k == "type" and v == "array"
is_string = k == "type" and v == "string"
has_required_fields = grand_parent and "required" in grand_parent
is_required_field = has_required_fields and key in grand_parent["required"]
if is_array and not is_required_field:
new_v = ["array", "null"]
elif is_string and not is_required_field:
new_v = ["string", "null"]
new[k] = new_v
return new
except AttributeError:
return data
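# For example (illustrative): a "type": "string" property that is not listed
# in its grandparent's "required" array is widened to allow null:
#   {'type': 'string'} -> {'type': ['string', 'null']}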
def change_dict_values(d, prefix, version):
new = {}
try:
        for k, v in d.items():
new_v = v
if isinstance(v, dict):
new_v = change_dict_values(v, prefix, version)
elif isinstance(v, list):
new_v = list()
for x in v:
new_v.append(change_dict_values(x, prefix, version))
            elif isinstance(v, string_types):
if k == "$ref":
if version < '3':
new_v = "%s%s" % (prefix, v)
else:
new_v = v.replace("#/components/schemas/", "") + ".json"
else:
new_v = v
new[k] = new_v
return new
except AttributeError:
return d
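# For example (illustrative), with an OpenAPI 2 document and the default
# prefix '_definitions.json':
#   {'$ref': '#/definitions/io.k8s.api.core.v1.Pod'}
#   -> {'$ref': '_definitions.json#/definitions/io.k8s.api.core.v1.Pod'}
# For OpenAPI 3, '#/components/schemas/Foo' becomes 'Foo.json'.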
def info(message):
click.echo(click.style(message, fg='green'))
def debug(message):
click.echo(click.style(message, fg='yellow'))
def error(message):
click.echo(click.style(message, fg='red'))
def group_version_kind(title):
return title.lower().split('.')[-3:]
def output_filename(group, version, kind):
if group == "core":
return "%s-%s.json" % (kind, version)
else:
return "%s-%s-%s.json" % (kind, group, version)
@click.command()
@click.option('-o', '--output', default='schemas', metavar='PATH', help='Directory to store schema files')
@click.option('-p', '--prefix', default='_definitions.json', help='Prefix for JSON references (only for OpenAPI versions before 3.0)')
@click.option('--stand-alone', is_flag=True, help='Whether or not to de-reference JSON schemas')
@click.option('--kubernetes', is_flag=True, help='Enable Kubernetes specific processors')
@click.option('--strict', is_flag=True, help='Prohibits properties not in the schema (additionalProperties: false)')
@click.argument('schema', metavar='SCHEMA_URL')
def default(output, schema, prefix, stand_alone, kubernetes, strict):
"""
Converts a valid OpenAPI specification into a set of JSON Schema files
"""
info("Downloading schema %s" % schema)
if sys.version_info < (3, 0):
response = urllib.urlopen(schema)
else:
if os.path.isfile(schema):
schema = 'file://' + os.path.realpath(schema)
req = urllib.request.Request(schema)
response = urllib.request.urlopen(req)
info("Parsing schema %s" % schema)
# Note that JSON is valid YAML, so we can use the YAML parser whether
# the schema is stored in JSON or YAML
    data = yaml.safe_load(response.read())
if 'swagger' in data:
version = data['swagger']
elif 'openapi' in data:
version = data['openapi']
if not os.path.exists(output):
os.makedirs(output)
if version < '3':
with open("%s/_definitions.json" % output, 'w') as definitions_file:
info("Generating shared definitions")
definitions = data['definitions']
if kubernetes:
definitions['io.k8s.apimachinery.pkg.util.intstr.IntOrString'] = {'oneOf': [
{'type': 'string'},
{'type': 'integer'},
]}
definitions['io.k8s.apimachinery.pkg.api.resource.Quantity'] = {'oneOf': [
{'type': 'string'},
{'type': 'integer'},
]}
if strict:
definitions = additional_properties(definitions)
definitions_file.write(json.dumps({"definitions": definitions}, indent=2))
info("Generating individual schemas")
if version < '3':
components = data['definitions']
else:
components = data['components']['schemas']
for title in components:
if title.startswith('io.k8s.kubernetes.pkg.apis'):
continue
group, api_version, kind = group_version_kind(title)
if group == "api":
continue
specification = components[title]
specification["$schema"] = "http://json-schema.org/schema#"
specification.setdefault("type", "object")
try:
debug("Processing %s, %s" % (kind, api_version))
updated = change_dict_values(specification, prefix, version)
specification = updated
            # These Kubernetes types carry around JSON schema for Kubernetes
            # and don't currently work with openapi2jsonschema
if kubernetes and stand_alone and kind in KINDS_WITH_JSONSCHEMA:
raise UnsupportedError("%s not currently supported" % kind)
if stand_alone:
base = "file://%s/%s/" % (os.getcwd(), output)
specification = JsonRef.replace_refs(specification, base_uri=base)
if "additionalProperties" in specification:
if specification["additionalProperties"]:
updated = change_dict_values(specification["additionalProperties"], prefix, version)
specification["additionalProperties"] = updated
if "properties" in specification:
if strict:
updated = additional_properties(specification["properties"])
specification["properties"] = updated
if kubernetes:
updated = replace_int_or_string(specification["properties"])
updated = allow_null_optional_fields(updated)
specification["properties"] = updated
else:
specification = {
"$schema": specification['$schema'],
"$ref": "_definitions.json#/definitions/%s" % title,
"description": specification.get('description'),
"type": specification['type']
}
schema_file_name = output_filename(group, api_version, kind)
with open("%s/%s" % (output, schema_file_name), 'w') as schema_file:
debug("Generating %s" % schema_file_name)
schema_file.write(json.dumps(specification, indent=2))
except Exception as e:
error("An error occured processing %s: %s" % (kind, e))
with open("%s/all.json" % output, 'w') as all_file:
info("Generating schema for all types")
contents = {"oneOf": []}
for title in components:
if version < '3':
if stand_alone:
contents["oneOf"].append({"$ref": "%s#/%s" % (prefix.replace('_definitions.json', output_filename(*group_version_kind(title))), title)})
else:
contents["oneOf"].append({"$ref": "%s#/definitions/%s" % (prefix, title)})
else:
contents["oneOf"].append({"$ref": (title.replace("#/components/schemas/", "") + ".json")})
all_file.write(json.dumps(contents, indent=2))
if stand_alone:
os.remove("%s/_definitions.json" % output)
if __name__ == '__main__':
default()
|
import numpy as np
import pydicom
from pydicom.data import get_testdata_files
from tensorflow.keras.utils import Sequence
from tensorflow.keras.utils import to_categorical
from skimage.transform import resize
import config
# Categories must be encoded before calling the DataGenerator.
# Before splitting into train and validation sets, shuffle the data.
"""
INPUTS FOR DATAGENERATOR CLASS
list_IDs - array of strings of file paths to dcm file
labels - encoded labels for the corresponding files
batch size
dimensions - as from config the dimension of input image to the model
n_channels - number of channels for image - (will be 1 for us in greyscale)
shuffle - shuffle starting position for each epoch
"""
"""
IMPLEMENTATION
Assuming csv file with available information loaded as a dataframe
- extract labels from dataframe
- extract paths from dataframe
- encode labels from dataframe
instantiate train_datagenerator and validation_datagenerator
run training using :
model.fit(x=training_generator,
= validation_data=validation_generator,
epochs = config.epochs,
callbacks=[
EarlyStopping(monitor='val_categorical_accuracy', patience=10, restore_best_weights=True),
ReduceLROnPlateau(patience=6)
]
"""
class DataGenerator(Sequence):
"""
Generates data using Sequence to cycle through images to be processed for training
"""
def __init__(self, list_IDs, labels, batch_size=config.BATCH_SIZE,
dim=(config.VGG_IMG_SIZE['HEIGHT'], config.VGG_IMG_SIZE['WIDTH']),
n_channels=1, shuffle=True):
'Initialization'
self.dim = dim
self.labels = labels
self.batch_size = batch_size
self.list_IDs = list_IDs
self.n_channels = n_channels
self.shuffle = shuffle
self.on_epoch_end()
def __len__(self):
"""
Denotes the number of batches to be run per epoch
:return: Number of batches
"""
return int(np.floor(len(self.list_IDs) / self.batch_size))
def __getitem__(self, index):
"""
:param index: point in samples
:return: generates a batch
"""
# Generate indexes of the batch
indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size]
# Find list of IDs
list_IDs_temp = [self.list_IDs[k] for k in indexes]
y = [self.labels[k] for k in indexes]
# Generate data
X = self.__data_generation(list_IDs_temp)
return X, np.array(y)
def on_epoch_end(self):
"""
Update the order of indexes at the very beginning and at the end of each epoch (so that batches between epochs
do not look alike to get a more robust model).
"""
self.indexes = np.arange(len(self.list_IDs))
        if self.shuffle:
np.random.shuffle(self.indexes)
def __data_generation(self, list_IDs_temp):
"""
Generate the data for a batch
:param list_IDs_temp: ID's to be in the batch
:return: images as arrays for the batch
"""
        # Generates data containing batch_size samples; X : (n_samples, *dim, n_channels)
# Initialization
X = np.empty((self.batch_size, *self.dim, self.n_channels))
# Generate data
for i, ID in enumerate(list_IDs_temp):
# process dicom image into array and resize to input dimensions and add to batch
X[i,] = load_dicom(ID, self.dim)
return X
def load_dicom(path, dim):
    """
    Read a DICOM file and resize its pixel array to the model input size.
    :param path: path to the .dcm file
    :param dim: (height, width) target dimensions
    :return: resized single-channel image array
    """
image_dicom = pydicom.dcmread(path)
image_as_array = image_dicom.pixel_array
resized_image = resize(image_as_array, (dim[0], dim[1], 1), anti_aliasing=True)
return resized_image
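# A minimal end-to-end sketch of the IMPLEMENTATION notes above. The dataframe
# column names ('path', 'label') and the LabelEncoder step are assumptions for
# illustration, not part of this module.
def make_generators(dataframe, validation_fraction=0.2):
    """Build train and validation DataGenerators from a dataframe (sketch)."""
    from sklearn.preprocessing import LabelEncoder  # assumed to be available
    paths = dataframe['path'].tolist()
    labels = to_categorical(LabelEncoder().fit_transform(dataframe['label']))
    split = int(len(paths) * (1 - validation_fraction))
    return (DataGenerator(paths[:split], labels[:split]),
            DataGenerator(paths[split:], labels[split:]))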
|
import vcf_data_loader
def compute_ld(data):
    """
    Compute the pairwise linkage disequilibrium (r^2) matrix for a
    (samples x variants) tensor.
    """
    n = data.size()[0]
    # Standardize each variant to zero mean and unit variance
    data = (data - data.mean(dim=0)) / data.std(dim=0)
    # For standardized data, X^T X / n is the correlation matrix; r^2 is its square
    return (data.transpose(0, 1) @ data / n) ** 2
if __name__ == "__main__":
import matplotlib.pyplot as plt
vcf = vcf_data_loader.FixedSizeVCFChunks(
"all_1kg_chr1_phased_GRCh38_snps_maf0.01.recode.vcf.gz"
)
data = vcf.get_dataset_for_chunk_id(20)
ld = compute_ld(data)
plt.matshow(ld)
plt.colorbar()
plt.show()
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2017 John Dewey
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pytest
@pytest.fixture()
def first_match_urls():
return {
'http://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'http://example.com/owner/repo': {
'pathname': '/owner/repo',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://example.com/owner/repo',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'http://example.com/owner/repo/': {
'pathname': '/owner/repo/',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://example.com/owner/repo/',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'http://user@example.com/user/repo': {
'pathname': '/user/repo',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://user@example.com/user/repo',
'resource': 'example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': 'user',
},
'http://example.com:29418/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://example.com:29418/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': '29418',
'name': 'repo',
'owner': 'owner',
},
'http://user@example.com:29418/user/repo': {
'pathname': '/user/repo',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://user@example.com:29418/user/repo',
'resource': 'example.com',
'user': 'user',
'port': '29418',
'name': 'repo',
'owner': 'user',
},
'http://user@example.com:29418/user/repo/': {
'pathname': '/user/repo/',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://user@example.com:29418/user/repo/',
'resource': 'example.com',
'user': 'user',
'port': '29418',
'name': 'repo',
'owner': 'user',
},
'http://example.com/repo': {
'pathname': '/repo',
'protocols': ['http'],
'protocol': 'http',
'href': 'http://example.com/repo',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': None,
},
'https://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'https://example.com/owner/repo': {
'pathname': '/owner/repo',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://example.com/owner/repo',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'https://user@example.com/user/repo': {
'pathname': '/user/repo',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://user@example.com/user/repo',
'resource': 'example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': 'user',
},
'https://example.com:29418/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://example.com:29418/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': '29418',
'name': 'repo',
'owner': 'owner',
},
'https://user@example.com:29418/user/repo': {
'pathname': '/user/repo',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://user@example.com:29418/user/repo',
'resource': 'example.com',
'user': 'user',
'port': '29418',
'name': 'repo',
'owner': 'user',
},
'https://example.com/repo': {
'pathname': '/repo',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://example.com/repo',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': None,
},
'rsync://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['rsync'],
'protocol': 'rsync',
'href': 'rsync://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'git://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['git'],
'protocol': 'git',
'href': 'git://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'git://example.com/owner/repo': {
'pathname': '/owner/repo',
'protocols': ['git'],
'protocol': 'git',
'href': 'git://example.com/owner/repo',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'git://example.com/owner/repo/': {
'pathname': '/owner/repo/',
'protocols': ['git'],
'protocol': 'git',
'href': 'git://example.com/owner/repo/',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'ssh://user@example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['ssh'],
'protocol': 'ssh',
'href': 'ssh://user@example.com/owner/repo.git',
'resource': 'example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': 'owner',
},
'ssh://user@example.com:29418/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['ssh'],
'protocol': 'ssh',
'href': 'ssh://user@example.com:29418/owner/repo.git',
'resource': 'example.com',
'user': 'user',
'port': '29418',
'name': 'repo',
'owner': 'owner',
},
'ssh://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['ssh'],
'protocol': 'ssh',
'href': 'ssh://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'ssh://example.com:29418/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['ssh'],
'protocol': 'ssh',
'href': 'ssh://example.com:29418/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': '29418',
'name': 'repo',
'owner': 'owner',
},
# https://github.com/retr0h/git-url-parse/issues/29
'https://github.com/sphinx-doc/sphinx.git': {
'pathname': '/sphinx-doc/sphinx.git',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://github.com/sphinx-doc/sphinx.git',
'resource': 'github.com',
'user': None,
'port': None,
'name': 'sphinx',
'owner': 'sphinx-doc',
},
# https://github.com/retr0h/git-url-parse/issues/33
'https://github.com/tterranigma/Stouts.openvpn': {
'pathname': '/tterranigma/Stouts.openvpn',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://github.com/tterranigma/Stouts.openvpn',
'resource': 'github.com',
'user': None,
'port': None,
'name': 'Stouts.openvpn',
'owner': 'tterranigma',
},
'https://github.com/tterranigma/Stouts.openvpn/': {
'pathname': '/tterranigma/Stouts.openvpn/',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://github.com/tterranigma/Stouts.openvpn/',
'resource': 'github.com',
'user': None,
'port': None,
'name': 'Stouts.openvpn',
'owner': 'tterranigma',
},
# https://github.com/retr0h/git-url-parse/issues/33
'https://github.com/tterranigma/Stouts.openvpn.git': {
'pathname': '/tterranigma/Stouts.openvpn.git',
'protocols': ['https'],
'protocol': 'https',
'href': 'https://github.com/tterranigma/Stouts.openvpn.git',
'resource': 'github.com',
'user': None,
'port': None,
'name': 'Stouts.openvpn',
'owner': 'tterranigma',
},
}
@pytest.fixture()
def second_match_urls():
return {
'git+ssh://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['git', 'ssh'],
'protocol': 'ssh',
'href': 'git+ssh://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'git+ssh://example.com:9999/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['git', 'ssh'],
'protocol': 'ssh',
'href': 'git+ssh://example.com:9999/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': '9999',
'name': 'repo',
'owner': 'owner',
},
'git+https://example.com/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['git', 'https'],
'protocol': 'https',
'href': 'git+https://example.com/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'git+https://example.com:9999/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': ['git', 'https'],
'protocol': 'https',
'href': 'git+https://example.com:9999/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': '9999',
'name': 'repo',
'owner': 'owner',
},
}
@pytest.fixture()
def third_match_urls():
return {
'user@example.com:/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'user@example.com:/owner/repo.git',
'resource': 'example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': 'owner',
},
'user@example.com:owner/repo.git': {
'pathname': 'owner/repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'user@example.com:owner/repo.git',
'resource': 'example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': 'owner',
},
'user@foo-example.com:owner/repo.git': {
'pathname': 'owner/repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'user@foo-example.com:owner/repo.git',
'resource': 'foo-example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': 'owner',
},
# GitLab
'user@foo-example.com:9999/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'user@foo-example.com:9999/owner/repo.git',
'resource': 'foo-example.com',
'user': 'user',
'port': '9999',
'name': 'repo',
'owner': 'owner',
},
}
@pytest.fixture()
def fourth_match_urls():
return {
# NOTE(retr0h): This should really be handled by regexp group 3
'user@example.com:repo.git': {
'pathname': 'repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'user@example.com:repo.git',
'resource': 'example.com',
'user': 'user',
'port': None,
'name': 'repo',
'owner': None,
},
'example.com:/owner/repo.git': {
'pathname': '/owner/repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'example.com:/owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'example.com:owner/repo.git': {
'pathname': 'owner/repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'example.com:owner/repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': 'owner',
},
'example.com:repo.git': {
'pathname': 'repo.git',
'protocols': [],
'protocol': 'ssh',
'href': 'example.com:repo.git',
'resource': 'example.com',
'user': None,
'port': None,
'name': 'repo',
'owner': None,
},
}
@pytest.fixture()
def invalid_strings():
return ['', 'not a valid URL']
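# A minimal sketch of how these fixtures might be consumed; it assumes the
# giturlparse.parse() API from retr0h/git-url-parse (the real test module is
# not shown here, so this is illustrative only).
def test_first_match_urls_sketch(first_match_urls):
    import giturlparse  # assumption: the package under test
    for url, expected in first_match_urls.items():
        parsed = giturlparse.parse(url)
        for attr, value in expected.items():
            assert getattr(parsed, attr) == value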
|
# Copyright (c) 2012-2021, Camptocamp SA
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
import json
from typing import Any, Dict, Optional, Type
from sqlalchemy.engine import Dialect
from sqlalchemy.types import VARCHAR, TypeDecorator, UserDefinedType
# From https://docs.sqlalchemy.org/en/latest/orm/extensions/mutable.html#establishing-mutability-on-scalar-column-values
class JSONEncodedDict(TypeDecorator): # type: ignore
"""Represent an immutable structure as a json-encoded string."""
impl = VARCHAR
@staticmethod
def process_bind_param(value: Optional[Dict[str, Any]], _: Dialect) -> Optional[str]:
return json.dumps(value) if value is not None else None
@staticmethod
def process_result_value(value: Optional[str], _: Dialect) -> Optional[Dict[str, Any]]:
return json.loads(value) if value is not None else None
@property
def python_type(self) -> Type[Any]:
return dict
@staticmethod
def process_literal_param(value: str, dialect: Any) -> str:
del dialect
return json.dumps(value)
class TsVector(UserDefinedType): # type: ignore
"""A custom type for PostgreSQL's tsvector type."""
def get_col_spec(self) -> str: # pylint: disable=no-self-use
return "TSVECTOR"
@property
def python_type(self) -> Type[Any]:
return dict
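# A minimal usage sketch for the two types above (illustrative only; the
# model and column names are hypothetical). Pairing JSONEncodedDict with
# MutableDict follows the SQLAlchemy mutability recipe referenced at the
# top of this module.
from sqlalchemy import Column, Integer
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import declarative_base

_Base = declarative_base()

class _Example(_Base):  # hypothetical model
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)
    # in-place changes to the dict are detected thanks to MutableDict
    data = Column(MutableDict.as_mutable(JSONEncodedDict(255)))
    # only meaningful on PostgreSQL
    search = Column(TsVector())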
|
default_app_config = 'prosopography.apps.ProsopographyConfig'
|
from predictionserver.futureconventions.performanceconventions import (
PerformanceConventions
)
class PerformanceHabits(PerformanceConventions):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._PERFORMANCE_BACKWARD_COMPATIBLE = True
|
# <<BEGIN-copyright>>
# Copyright 2019, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: MIT
# <<END-copyright>>
from __future__ import print_function
import os
from numericalFunctions import pointwiseXY_C
if( 'CHECKOPTIONS' in os.environ ) :
options = os.environ['CHECKOPTIONS'].split( )
if( '-e' in options ) : print( __file__ )
CPATH = '../../../../Test/UnitTesting/Others'
os.system( 'cd %s; make -s clean; ./trim -v > v' % CPATH )
def skipBlankLines( ls ) :
i = 0
for i, l in enumerate( ls ) :
if( l.strip( ) != '' ) : break
ls = ls[i:]
if( ( len( ls ) == 1 ) and ( ls[0].strip( ) == '' ) ) : ls = []
return( ls )
def getIntegerValue( name, ls ) :
s = "# %s = " % name
n = len( s )
if( ls[0][:n] != s ) : raise Exception( '%s: missing %s info: "%s"' % ( __file__, name, ls[0][:-1] ) )
value = int( ls[0].split( '=' )[1] )
return( ls[1:], value )
def getDoubleValue( name, ls ) :
s = "# %s = " % name
n = len( s )
if( ls[0][:n] != s ) : raise Exception( '%s: missing %s info: "%s"' % ( __file__, name, ls[0][:-1] ) )
value = float( ls[0].split( '=' )[1] )
return( ls[1:], value )
def compareValues( label, i, v1, v2 ) :
sv1, sv2 = '%.12e' % v1, '%.12e' % v2
sv1, sv2 = '%.7e' % float( sv1 ), '%.7e' % float( sv2 )
if( sv1 != sv2 ) : print( '<%s> <%s>' % ( sv1, sv2 ) )
if( sv1 != sv2 ) : raise Exception( '%s: values %e and %e diff by %e at %d for label = %s' % ( __file__, v1, v2, v2 - v1, i, label ) )
def getXYData( ls ) :
ls, length = getIntegerValue( 'length', ls )
data = [ list( map( float, ls[i].split( ) ) ) for i in range( length ) ]
data = pointwiseXY_C.pointwiseXY_C( data, initialSize = len( data ), overflowSize = 10 )
ls = ls[length:]
ls = skipBlankLines( ls )
return( ls, data )
def checkClipping( count, ls ) :
ls, data = getXYData( ls )
ls, trimmedC = getXYData( ls )
trimmed = data.trim( )
if( len( trimmed ) != len( trimmedC ) ) : raise Exception( '%s: at %d len( trimmed ) = %d != len( trimmedC ) = %d' % \
( __file__, count, len( trimmed ), len( trimmedC ) ) )
for i, xy in enumerate( trimmed ) :
compareValues( "x trimmed", count, xy[0], trimmedC[i][0] )
compareValues( "y trimmed", count, xy[1], trimmedC[i][1] )
return( ls )
f = open( os.path.join( CPATH, 'v' ) )
ls = f.readlines( )
f.close( )
count = 0
while( len( ls ) ) :
count += 1
if( count == 6 ) :
ls, dummy = getXYData( ls )
ls, dummy = getXYData( ls )
ls = checkClipping( count, ls )
|
import sys
import sinedon
from sinedon import dbconfig
from sinedon import directq
from leginon import projectdata
from leginon import leginondata
import time
# set direct_query values
# exclude preset lable
excludelist = ()
def checkSinedon():
try:
destination_dbinfo = dbconfig.getConfig('importdata')
except KeyError:
print "Please define impordata module in sinedon.cfg"
sys.exit(1)
if not hasattr(sinedon.dbdatakeeper.DBDataKeeper,'initImported'):
print "sinedon must be imported from myami-dbcopy branch"
print "currently from %s",sinedon.__file__
sys.exit(1)
class Archiver(object):
def __init__(self):
self.status = True # initialize status to o.k.
source_dbinfo = dbconfig.getConfig('projectdata')
destination_dbinfo = dbconfig.getConfig('importdata')
if source_dbinfo['host'] != destination_dbinfo['host']:
self.escape('projectdata and importdata not on the same host')
self.dbhost = source_dbinfo['host']
self.source_dbname = source_dbinfo['db']
self.destination_dbname = destination_dbinfo['db']
def isStatusGood(self):
return self.status
def escape(self,msg=''):
print msg
self.reset()
self.status = False
def reset(self):
'''
reset configuration to source db to avoid confusion
'''
sinedon.setConfig('projectdata', db=self.source_dbname)
def research(self,q,most_recent=False):
'''
        Query results from the source database, sorted by entry time, oldest first.
'''
# configuration must be set before any query
sinedon.setConfig('projectdata', db=self.source_dbname)
if most_recent:
r = q.query(results=1)
if r:
return r[0]
else:
r = q.query()
r.reverse()
return r
def publish(self,results):
'''
Publish query results to destination database.
'''
if not results:
return
# configuration must be set before any query
sinedon.setConfig('projectdata', db=self.destination_dbname)
for q in results:
q.insert(archive=True)
self.reset()
def replaceItem(self,data,key,value):
if data.has_key(key):
data.__setitem__(key, value, force=True)
def avoidExcludedImage(self,fulllist):
shortlist = []
for data in fulllist:
if data['image']['label'] in excludelist:
continue
else:
shortlist.append(data)
return shortlist
def findBrightImageFromNorm(self,normdata):
'''
        Find BrightImageData based on imported NormImageData.
This is needed for older data since BrightImageData was
not linked to AcquisitionImages previously.
'''
if normdata['bright']:
return normdata['bright']
sinedon.setConfig('projectdata', db=self.source_dbname)
timestamp = normdata.timestamp
normcam = normdata['camera']
qcam = projectdata.CameraEMData(dimension=normcam['dimension'],
offset=normcam['offset'], binning=normcam['binning'],
ccdcamera=normcam['ccdcamera'])
qcam['exposure type'] = 'normal'
qcam['energy filtered'] = normcam['energy filtered']
normscope = normdata['scope']
qscope = projectdata.ScopeEMData(tem=normscope['tem'])
qscope['high tension'] = normscope['high tension']
q = projectdata.BrightImageData(camera=qcam,scope=qscope,channel=normdata['channel'])
brightlist = q.query()
for brightdata in brightlist:
if brightdata.timestamp < timestamp:
break
return brightdata
def makequery(self,classname,kwargs):
'''
Make SQL query of projectdata from class name and keyword arguments.
'''
q = getattr(projectdata,classname)()
for key in kwargs.keys():
            # projectdata keys never contain '_'
realkey = key.replace('_',' ')
q[realkey] = kwargs[key]
return q
def makeTimeStringFromTimeStamp(self,timestamp):
t = timestamp
return '%04d%02d%02d%02d%02d%02d' % (t.year,t.month,t.day,t.hour,t.minute,t.second)
class ProjectArchiver(Archiver):
'''
Archive a project in projectdb
'''
def __init__(self,projectid):
super(ProjectArchiver,self).__init__()
self.projectid = projectid
self.setSourceProject(projectid)
self.setDestinationProject(projectid)
def setSourceProject(self, projectid):
sinedon.setConfig('projectdata', db=self.source_dbname)
self.source_project = projectdata.projects().direct_query(projectid)
def getSourceProject(self):
'''
Get Source Project data reference.
'''
        # This redoes the query since the reference often gets mapped to
        # the destination database, for unknown reasons, after some queries.
self.setSourceProject(self.projectid)
return self.source_project
def setDestinationProject(self, projectid):
self.destination_project = None
sinedon.setConfig('projectdata', db=self.destination_dbname)
project = projectdata.projects().direct_query(projectid)
self.destination_project = project
self.reset()
def getDestinationProject(self):
'''
Get Destination Project data reference.
'''
# Redo query for the same reason as in getSourceProject
self.setDestinationProject(self.projectid)
return self.destination_project
def importProjectValueDependentData(self,dataclassname,value,search_alias):
sinedon.setConfig('projectdata', db=self.source_dbname)
print "Importing %s...." % (dataclassname)
q = getattr(projectdata,dataclassname)()
q[search_alias] = value
results = self.research(q)
self.publish(results)
return results
def importProjectDependentData(self,dataclassname):
source_project = self.getSourceProject()
return self.importProjectValueDependentData(dataclassname,source_project,'project')
def importProject(self):
print "Importing project...."
projectdata = self.getSourceProject()
sinedon.setConfig('projectdata', db=self.destination_dbname)
projectdata.insert(force=False,archive=True)
projectdata = self.getDestinationProject()
if not projectdata:
self.escape("Session Not Inserted Successfully")
return
def importPrivileges(self):
print "Importing privileges...."
q = projectdata.privileges()
results = self.research(q)
self.publish(results)
def importProjectExperiments(self):
projectexperiments = self.importProjectDependentData('projectexperiments')
sessionids = []
# There are cases without session alias
for p in projectexperiments:
if p['session'] is None:
print ' projectexperiment id %d has no session reference' % p.dbid
continue
sessionids.append(p['session'].dbid)
self.importShareExperiments(sessionids)
def importProjectOwners(self):
self.importProjectDependentData('projectowners')
def importProcessingDB(self):
self.importProjectDependentData('processingdb')
def importLeginonDependentData(self,project_classname, leginon_classname, leginon_alias):
dataclassname = project_classname
        # Work around: leginondata cannot be mapped properly when projectdata is queried for import
q = getattr(leginondata,leginon_classname)()
results = self.research(q)
leginon_ids = map((lambda x: x.dbid),results)
self.importLeginonValueDependentData(project_classname, leginon_ids, leginon_alias)
def importLeginonValueDependentData(self,project_classname, leginon_ids, leginon_alias):
print "Importing %s...." % (project_classname)
q = getattr(projectdata,project_classname)()
results = self.research(q)
for r in results:
if r[leginon_alias] and r[leginon_alias].dbid in leginon_ids:
try:
self.publish([r,])
except:
                    f = open('error.log','a')
f.write('%s,%d,%s,%d\n' %(project_classname,r.dbid,leginon_alias,r[leginon_alias].dbid))
f.close()
def importUserDetails(self):
self.importLeginonDependentData('userdetails', 'UserData', 'user')
def importShareExperiments(self,expids):
self.importLeginonValueDependentData('shareexperiments',expids,'experiment')
def importInstall(self):
print "Importing Installation Log...."
source_dbinfo = dbconfig.getConfig('projectdata')
destination_dbinfo = dbconfig.getConfig('importdata')
q = 'select * from install where 1;'
results = directq.complexMysqlQuery('projectdata',q)
q = 'select * from install where 1;'
imported_results = directq.complexMysqlQuery('importdata',q)
for row in results:
keys = row.keys()
values = map((lambda x: row[x]),keys)
if not imported_results:
keystring = '`'+'`,`'.join(keys)+'`'
valuestring = "'"+"','".join(values)+"'"
q = "INSERT into `install` (%s) VALUES (%s)" % (keystring, valuestring)
directq.complexMysqlQuery('importdata',q)
else:
q = "UPDATE `install` SET `value` = '%s' where `install`.`key` = '%s';" % (row['value'],row['key'])
directq.complexMysqlQuery('importdata',q)
def run(self):
'''
STEP 1:
import project and map basic information about it
'''
self.importProject()
self.importPrivileges()
self.importProjectExperiments()
self.importProcessingDB()
self.importProjectOwners()
self.importUserDetails()
self.importInstall()
self.reset()
print ''
if __name__ == '__main__':
import sys
if len(sys.argv) != 2:
print "Usage: python archive_projectdb.py <project id number>"
print ""
print "sinedon.cfg should include a module"
print "[importdata]"
print "db: writable_archive_database for projectdb"
sys.exit()
projectid = int(sys.argv[1])
checkSinedon()
app = ProjectArchiver(projectid)
app.run()
|
# SPDX-License-Identifier: Apache-2.0
import copy
import numbers
from collections import deque, Counter
import ctypes
import json
import numpy as np
from ...common._apply_operation import (
apply_div, apply_reshape, apply_sub, apply_cast, apply_identity, apply_clip)
from ...common._registration import register_converter
from ...common.tree_ensemble import get_default_tree_classifier_attribute_pairs
from ....proto import onnx_proto
def has_tqdm():
try:
from tqdm import tqdm # noqa
return True
except ImportError:
return False
def _translate_split_criterion(criterion):
    # If the criterion is true, LightGBM uses the left child;
    # otherwise, the right child is selected.
if criterion == '<=':
return 'BRANCH_LEQ'
elif criterion == '<':
return 'BRANCH_LT'
elif criterion == '>=':
return 'BRANCH_GTE'
elif criterion == '>':
return 'BRANCH_GT'
elif criterion == '==':
return 'BRANCH_EQ'
elif criterion == '!=':
return 'BRANCH_NEQ'
else:
        raise ValueError(
            'Unsupported splitting criterion: %s. Only <=, <, >=, >, '
            '==, and != are allowed.' % criterion)
def _create_node_id(node_id_pool):
i = 0
while i in node_id_pool:
i += 1
node_id_pool.add(i)
return i
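# A tiny illustrative check (not used by the converter itself): node ids
# fill the smallest gap in the pool.
def _create_node_id_sketch():
    pool = {0, 1, 3}
    assert _create_node_id(pool) == 2
    assert pool == {0, 1, 2, 3}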
def _parse_tree_structure(tree_id, class_id, learning_rate,
tree_structure, attrs):
"""
    Parses a single tree. The pool of all node indexes is created while
    parsing that tree; different trees use different pools.
"""
node_id_pool = set()
node_pyid_pool = dict()
node_id = _create_node_id(node_id_pool)
node_pyid_pool[id(tree_structure)] = node_id
    # If the root node has no children, it is itself a leaf node.
if ('left_child' not in tree_structure or
'right_child' not in tree_structure):
_parse_node(tree_id, class_id, node_id, node_id_pool, node_pyid_pool,
learning_rate, tree_structure, attrs)
return
left_pyid = id(tree_structure['left_child'])
right_pyid = id(tree_structure['right_child'])
if left_pyid in node_pyid_pool:
left_id = node_pyid_pool[left_pyid]
left_parse = False
else:
left_id = _create_node_id(node_id_pool)
node_pyid_pool[left_pyid] = left_id
left_parse = True
if right_pyid in node_pyid_pool:
right_id = node_pyid_pool[right_pyid]
right_parse = False
else:
right_id = _create_node_id(node_id_pool)
node_pyid_pool[right_pyid] = right_id
right_parse = True
attrs['nodes_treeids'].append(tree_id)
attrs['nodes_nodeids'].append(node_id)
attrs['nodes_featureids'].append(tree_structure['split_feature'])
attrs['nodes_modes'].append(
_translate_split_criterion(tree_structure['decision_type']))
if isinstance(tree_structure['threshold'], str):
try:
attrs['nodes_values'].append(float(tree_structure['threshold']))
except ValueError:
import pprint
text = pprint.pformat(tree_structure)
if len(text) > 100000:
text = text[:100000] + "\n..."
raise TypeError("threshold must be a number not '{}'"
"\n{}".format(tree_structure['threshold'], text))
else:
attrs['nodes_values'].append(tree_structure['threshold'])
# Assume left is the true branch and right is the false branch
attrs['nodes_truenodeids'].append(left_id)
attrs['nodes_falsenodeids'].append(right_id)
if tree_structure['default_left']:
if tree_structure["missing_type"] == 'None' and float(tree_structure['threshold']) < 0.0:
attrs['nodes_missing_value_tracks_true'].append(0)
else:
attrs['nodes_missing_value_tracks_true'].append(1)
else:
attrs['nodes_missing_value_tracks_true'].append(0)
attrs['nodes_hitrates'].append(1.)
if left_parse:
_parse_node(
tree_id, class_id, left_id, node_id_pool, node_pyid_pool,
learning_rate, tree_structure['left_child'], attrs)
if right_parse:
_parse_node(
tree_id, class_id, right_id, node_id_pool, node_pyid_pool,
learning_rate, tree_structure['right_child'], attrs)
def _parse_node(tree_id, class_id, node_id, node_id_pool, node_pyid_pool,
learning_rate, node, attrs):
"""
Parses nodes.
"""
if ((hasattr(node, 'left_child') and hasattr(node, 'right_child')) or
('left_child' in node and 'right_child' in node)):
left_pyid = id(node['left_child'])
right_pyid = id(node['right_child'])
if left_pyid in node_pyid_pool:
left_id = node_pyid_pool[left_pyid]
left_parse = False
else:
left_id = _create_node_id(node_id_pool)
node_pyid_pool[left_pyid] = left_id
left_parse = True
if right_pyid in node_pyid_pool:
right_id = node_pyid_pool[right_pyid]
right_parse = False
else:
right_id = _create_node_id(node_id_pool)
node_pyid_pool[right_pyid] = right_id
right_parse = True
attrs['nodes_treeids'].append(tree_id)
attrs['nodes_nodeids'].append(node_id)
attrs['nodes_featureids'].append(node['split_feature'])
attrs['nodes_modes'].append(
_translate_split_criterion(node['decision_type']))
if isinstance(node['threshold'], str):
try:
attrs['nodes_values'].append(float(node['threshold']))
except ValueError:
import pprint
text = pprint.pformat(node)
if len(text) > 100000:
text = text[:100000] + "\n..."
raise TypeError("threshold must be a number not '{}'"
"\n{}".format(node['threshold'], text))
else:
attrs['nodes_values'].append(node['threshold'])
# Assume left is the true branch
# and right is the false branch
attrs['nodes_truenodeids'].append(left_id)
attrs['nodes_falsenodeids'].append(right_id)
if node['default_left']:
if node['missing_type'] == 'None' and float(node['threshold']) < 0.0:
attrs['nodes_missing_value_tracks_true'].append(0)
else:
attrs['nodes_missing_value_tracks_true'].append(1)
else:
attrs['nodes_missing_value_tracks_true'].append(0)
attrs['nodes_hitrates'].append(1.)
# Recursively dive into the child nodes
if left_parse:
_parse_node(
tree_id, class_id, left_id, node_id_pool, node_pyid_pool,
learning_rate, node['left_child'], attrs)
if right_parse:
_parse_node(
tree_id, class_id, right_id, node_id_pool, node_pyid_pool,
learning_rate, node['right_child'], attrs)
    elif 'left_child' in node or 'right_child' in node:
raise ValueError('Need two branches')
else:
# Node attributes
attrs['nodes_treeids'].append(tree_id)
attrs['nodes_nodeids'].append(node_id)
attrs['nodes_featureids'].append(0)
attrs['nodes_modes'].append('LEAF')
# Leaf node has no threshold.
# A zero is appended but it will never be used.
attrs['nodes_values'].append(0.)
# Leaf node has no child.
# A zero is appended but it will never be used.
attrs['nodes_truenodeids'].append(0)
# Leaf node has no child.
# A zero is appended but it will never be used.
attrs['nodes_falsenodeids'].append(0)
# Leaf node has no split function.
# A zero is appended but it will never be used.
attrs['nodes_missing_value_tracks_true'].append(0)
attrs['nodes_hitrates'].append(1.)
# Leaf attributes
attrs['class_treeids'].append(tree_id)
attrs['class_nodeids'].append(node_id)
attrs['class_ids'].append(class_id)
attrs['class_weights'].append(
float(node['leaf_value']) * learning_rate)
def dump_booster_model(self, num_iteration=None, start_iteration=0,
importance_type='split', verbose=0):
"""
Dumps Booster to JSON format.
Parameters
----------
self: booster
num_iteration : int or None, optional (default=None)
Index of the iteration that should be dumped.
If None, if the best iteration exists, it is dumped; otherwise,
all iterations are dumped.
If <= 0, all iterations are dumped.
start_iteration : int, optional (default=0)
Start index of the iteration that should be dumped.
importance_type : string, optional (default="split")
What type of feature importance should be dumped.
If "split", result contains numbers of times the feature is used in a model.
If "gain", result contains total gains of splits which use the feature.
    verbose : int, optional (default=0)
        displays progress (useful for big trees)
Returns
-------
json_repr : dict
JSON format of Booster.
.. note::
        This function is inspired by *lightgbm*'s `dump_model
        <https://lightgbm.readthedocs.io/en/latest/pythonapi/
        lightgbm.Booster.html#lightgbm.Booster.dump_model>`_ method.
        It creates an intermediate structure to speed up the conversion
        of such a model into ONNX. The function overrides `json.loads`
        decoding to extract nodes quickly.
"""
if getattr(self, 'is_mock', False):
return self.dump_model(), None
from lightgbm.basic import (
_LIB, FEATURE_IMPORTANCE_TYPE_MAPPER, _safe_call,
json_default_with_numpy)
if num_iteration is None:
num_iteration = self.best_iteration
importance_type_int = FEATURE_IMPORTANCE_TYPE_MAPPER[importance_type]
buffer_len = 1 << 20
tmp_out_len = ctypes.c_int64(0)
string_buffer = ctypes.create_string_buffer(buffer_len)
ptr_string_buffer = ctypes.c_char_p(*[ctypes.addressof(string_buffer)])
if verbose >= 2:
print("[dump_booster_model] call CAPI: LGBM_BoosterDumpModel")
_safe_call(_LIB.LGBM_BoosterDumpModel(
self.handle,
ctypes.c_int(start_iteration),
ctypes.c_int(num_iteration),
ctypes.c_int(importance_type_int),
ctypes.c_int64(buffer_len),
ctypes.byref(tmp_out_len),
ptr_string_buffer))
actual_len = tmp_out_len.value
# if buffer length is not long enough, reallocate a buffer
if actual_len > buffer_len:
string_buffer = ctypes.create_string_buffer(actual_len)
ptr_string_buffer = ctypes.c_char_p(
*[ctypes.addressof(string_buffer)])
_safe_call(_LIB.LGBM_BoosterDumpModel(
self.handle,
ctypes.c_int(start_iteration),
ctypes.c_int(num_iteration),
ctypes.c_int(importance_type_int),
ctypes.c_int64(actual_len),
ctypes.byref(tmp_out_len),
ptr_string_buffer))
class Hook(json.JSONDecoder):
"""
Keep track of the progress, stores a copy of all objects with
a decision into a different container in order to walk through
all nodes in a much faster way than going through the architecture.
"""
def __init__(self, *args, info=None, n_trees=None, verbose=0,
**kwargs):
json.JSONDecoder.__init__(
self, object_hook=self.hook, *args, **kwargs)
self.nodes = []
self.buffer = []
self.info = info
self.n_trees = n_trees
self.verbose = verbose
self.stored = 0
if verbose >= 2 and n_trees is not None and has_tqdm():
from tqdm import tqdm
self.loop = tqdm(total=n_trees)
self.loop.set_description("dump_booster")
else:
self.loop = None
def hook(self, obj):
"""
            Hook called every time a JSON object is created.
Keep track of the progress, stores a copy of all objects with
a decision into a different container.
"""
# Every obj goes through this function from the leaves to the root.
if 'tree_info' in obj:
self.info['decision_nodes'] = self.nodes
if self.n_trees is not None and len(self.nodes) != self.n_trees:
raise RuntimeError(
"Unexpected number of trees %d (expecting %d)." % (
len(self.nodes), self.n_trees))
self.nodes = []
if self.loop is not None:
self.loop.close()
if 'tree_structure' in obj:
self.nodes.append(self.buffer)
if self.loop is not None:
                    self.loop.update(1)
if len(self.nodes) % 10 == 0:
self.loop.set_description(
"dump_booster: %d/%d trees, %d nodes" % (
len(self.nodes), self.n_trees, self.stored))
self.buffer = []
if "decision_type" in obj:
self.buffer.append(obj)
self.stored += 1
return obj
if verbose >= 2:
print("[dump_booster_model] to_json")
info = {}
ret = json.loads(string_buffer.value.decode('utf-8'), cls=Hook,
info=info, n_trees=self.num_trees(), verbose=verbose)
ret['pandas_categorical'] = json.loads(
json.dumps(self.pandas_categorical,
default=json_default_with_numpy))
if verbose >= 2:
print("[dump_booster_model] end.")
return ret, info
def convert_lightgbm(scope, operator, container):
"""
Converters for *lightgbm*.
"""
verbose = getattr(container, 'verbose', 0)
gbm_model = operator.raw_operator
gbm_text, info = dump_booster_model(gbm_model.booster_, verbose=verbose)
modify_tree_for_rule_in_set(gbm_text, use_float=True, verbose=verbose, info=info)
attrs = get_default_tree_classifier_attribute_pairs()
attrs['name'] = operator.full_name
# Create different attributes for classifier and
# regressor, respectively
post_transform = None
if gbm_text['objective'].startswith('binary'):
n_classes = 1
attrs['post_transform'] = 'LOGISTIC'
elif gbm_text['objective'].startswith('multiclass'):
n_classes = gbm_text['num_class']
attrs['post_transform'] = 'SOFTMAX'
elif gbm_text['objective'].startswith('regression'):
n_classes = 1 # Regressor has only one output variable
attrs['post_transform'] = 'NONE'
attrs['n_targets'] = n_classes
elif gbm_text['objective'].startswith(('poisson', 'gamma')):
n_classes = 1 # Regressor has only one output variable
attrs['n_targets'] = n_classes
# 'Exp' is not a supported post_transform value in the ONNX spec yet,
# so we need to add an 'Exp' post transform node to the model
attrs['post_transform'] = 'NONE'
post_transform = "Exp"
else:
raise RuntimeError(
"LightGBM objective should be cleaned already not '{}'.".format(
gbm_text['objective']))
# Use the same algorithm to parse the tree
for i, tree in enumerate(gbm_text['tree_info']):
tree_id = i
class_id = tree_id % n_classes
        # tree['shrinkage'] --> LightGBM already applies it to the dumped values.
learning_rate = 1.
_parse_tree_structure(
tree_id, class_id, learning_rate, tree['tree_structure'], attrs)
    # Sort nodes_* attributes. For one tree, its node indexes
    # should appear in ascending order in nodes_nodeids. Nodes
    # from a tree with a smaller tree index should appear
    # before nodes from trees with larger indexes in nodes_nodeids.
node_numbers_per_tree = Counter(attrs['nodes_treeids'])
tree_number = len(node_numbers_per_tree.keys())
accumulated_node_numbers = [0] * tree_number
for i in range(1, tree_number):
accumulated_node_numbers[i] = (
accumulated_node_numbers[i - 1] + node_numbers_per_tree[i - 1])
global_node_indexes = []
for i in range(len(attrs['nodes_nodeids'])):
tree_id = attrs['nodes_treeids'][i]
node_id = attrs['nodes_nodeids'][i]
global_node_indexes.append(
accumulated_node_numbers[tree_id] + node_id)
for k, v in attrs.items():
if k.startswith('nodes_'):
merged_indexes = zip(
copy.deepcopy(global_node_indexes), v)
sorted_list = [pair[1]
for pair in sorted(merged_indexes,
key=lambda x: x[0])]
attrs[k] = sorted_list
# Create ONNX object
if (gbm_text['objective'].startswith('binary') or
gbm_text['objective'].startswith('multiclass')):
        # Prepare label information for the TreeEnsembleClassifier.
class_type = onnx_proto.TensorProto.STRING
if all(isinstance(i, (numbers.Real, bool, np.bool_))
for i in gbm_model.classes_):
class_type = onnx_proto.TensorProto.INT64
class_labels = [int(i) for i in gbm_model.classes_]
attrs['classlabels_int64s'] = class_labels
elif all(isinstance(i, str) for i in gbm_model.classes_):
class_labels = [str(i) for i in gbm_model.classes_]
attrs['classlabels_strings'] = class_labels
else:
raise ValueError(
'Only string and integer class labels are allowed')
# Create tree classifier
probability_tensor_name = scope.get_unique_variable_name(
'probability_tensor')
label_tensor_name = scope.get_unique_variable_name('label_tensor')
container.add_node(
'TreeEnsembleClassifier', operator.input_full_names,
[label_tensor_name, probability_tensor_name],
op_domain='ai.onnx.ml', **attrs)
prob_tensor = probability_tensor_name
if gbm_model.boosting_type == 'rf':
col_index_name = scope.get_unique_variable_name('col_index')
first_col_name = scope.get_unique_variable_name('first_col')
zeroth_col_name = scope.get_unique_variable_name('zeroth_col')
denominator_name = scope.get_unique_variable_name('denominator')
modified_first_col_name = scope.get_unique_variable_name(
'modified_first_col')
unit_float_tensor_name = scope.get_unique_variable_name(
'unit_float_tensor')
merged_prob_name = scope.get_unique_variable_name('merged_prob')
predicted_label_name = scope.get_unique_variable_name(
'predicted_label')
classes_name = scope.get_unique_variable_name('classes')
final_label_name = scope.get_unique_variable_name('final_label')
container.add_initializer(
col_index_name, onnx_proto.TensorProto.INT64, [], [1])
container.add_initializer(
unit_float_tensor_name, onnx_proto.TensorProto.FLOAT,
[], [1.0])
container.add_initializer(
denominator_name, onnx_proto.TensorProto.FLOAT, [],
[100.0])
container.add_initializer(classes_name, class_type,
[len(class_labels)], class_labels)
container.add_node(
'ArrayFeatureExtractor',
[probability_tensor_name, col_index_name],
first_col_name,
name=scope.get_unique_operator_name(
'ArrayFeatureExtractor'),
op_domain='ai.onnx.ml')
apply_div(scope, [first_col_name, denominator_name],
modified_first_col_name, container, broadcast=1)
apply_sub(
scope, [unit_float_tensor_name, modified_first_col_name],
zeroth_col_name, container, broadcast=1)
container.add_node(
'Concat', [zeroth_col_name, modified_first_col_name],
merged_prob_name,
name=scope.get_unique_operator_name('Concat'), axis=1)
container.add_node(
'ArgMax', merged_prob_name,
predicted_label_name,
name=scope.get_unique_operator_name('ArgMax'), axis=1)
container.add_node(
'ArrayFeatureExtractor', [classes_name, predicted_label_name],
final_label_name,
name=scope.get_unique_operator_name('ArrayFeatureExtractor'),
op_domain='ai.onnx.ml')
apply_reshape(scope, final_label_name,
operator.outputs[0].full_name,
container, desired_shape=[-1, ])
prob_tensor = merged_prob_name
else:
container.add_node('Identity', label_tensor_name,
operator.outputs[0].full_name,
name=scope.get_unique_operator_name('Identity'))
# Convert probability tensor to probability map
# (keys are labels while values are the associated probabilities)
container.add_node('Identity', prob_tensor,
operator.outputs[1].full_name)
else:
# Create tree regressor
output_name = scope.get_unique_variable_name('output')
keys_to_be_renamed = list(
k for k in attrs if k.startswith('class_'))
for k in keys_to_be_renamed:
            # Rename class_* attributes to target_*
            # because TreeEnsembleClassifier and TreeEnsembleRegressor
            # have different ONNX attributes.
attrs['target' + k[5:]] = copy.deepcopy(attrs[k])
del attrs[k]
container.add_node(
'TreeEnsembleRegressor', operator.input_full_names,
output_name, op_domain='ai.onnx.ml', **attrs)
if gbm_model.boosting_type == 'rf':
denominator_name = scope.get_unique_variable_name('denominator')
container.add_initializer(
denominator_name, onnx_proto.TensorProto.FLOAT, [], [100.0])
apply_div(scope, [output_name, denominator_name],
operator.output_full_names, container, broadcast=1)
elif post_transform:
container.add_node(
post_transform,
output_name,
operator.output_full_names,
name=scope.get_unique_operator_name(
post_transform),
)
else:
container.add_node('Identity', output_name,
operator.output_full_names,
name=scope.get_unique_operator_name('Identity'))
def modify_tree_for_rule_in_set(gbm, use_float=False, verbose=0, count=0, # pylint: disable=R1710
info=None):
"""
    LightGBM sometimes produces a tree with a node set to apply the rule
    ``==`` against a set of values (= in set); the values are separated
    by ``||``. This function unfolds these nodes.
    :param gbm: a tree coming from a lightgbm dump
    :param use_float: cast thresholds to float; otherwise try int first,
        then fall back to float if that fails
    :param verbose: verbosity, use *tqdm* to show progress
    :param count: number of nodes already changed (origin) before this call
    :param info: additional information to speed up this search
    :return: number of changed nodes (includes *count*)
"""
if 'tree_info' in gbm:
if info is not None:
dec_nodes = info['decision_nodes']
else:
dec_nodes = None
if verbose >= 2 and has_tqdm():
from tqdm import tqdm
loop = tqdm(gbm['tree_info'])
for i, tree in enumerate(loop):
loop.set_description("rules tree %d c=%d" % (i, count))
count = modify_tree_for_rule_in_set(
tree, use_float=use_float, count=count,
info=None if dec_nodes is None else dec_nodes[i])
else:
for i, tree in enumerate(gbm['tree_info']):
count = modify_tree_for_rule_in_set(
tree, use_float=use_float, count=count,
info=None if dec_nodes is None else dec_nodes[i])
return count
if 'tree_structure' in gbm:
return modify_tree_for_rule_in_set(
gbm['tree_structure'], use_float=use_float, count=count,
info=info)
if 'decision_type' not in gbm:
return count
def str2number(val):
if use_float:
return float(val)
else:
try:
return int(val)
except ValueError: # pragma: no cover
return float(val)
if info is None:
def recursive_call(this, c):
if 'left_child' in this:
c = process_node(this['left_child'], count=c)
if 'right_child' in this:
c = process_node(this['right_child'], count=c)
return c
def process_node(node, count):
if 'decision_type' not in node:
return count
if node['decision_type'] != '==':
return recursive_call(node, count)
th = node['threshold']
if not isinstance(th, str):
return recursive_call(node, count)
pos = th.find('||')
if pos == -1:
return recursive_call(node, count)
th1 = str2number(th[:pos])
def doit():
rest = th[pos + 2:]
if '||' not in rest:
rest = str2number(rest)
node['threshold'] = th1
new_node = node.copy()
node['right_child'] = new_node
new_node['threshold'] = rest
doit()
return recursive_call(node, count + 1)
return process_node(gbm, count)
# when info is used
def split_node(node, th, pos):
th1 = str2number(th[:pos])
rest = th[pos + 2:]
if '||' not in rest:
rest = str2number(rest)
app = False
else:
app = True
node['threshold'] = th1
new_node = node.copy()
node['right_child'] = new_node
new_node['threshold'] = rest
return new_node, app
stack = deque(info)
while len(stack) > 0:
node = stack.pop()
if 'decision_type' not in node:
            continue  # leaf node
if node['decision_type'] != '==':
continue
th = node['threshold']
if not isinstance(th, str):
continue
pos = th.find('||')
if pos == -1:
continue
new_node, app = split_node(node, th, pos)
count += 1
if app:
stack.append(new_node)
return count
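# A worked example of the unfolding above on a hypothetical toy tree: the
# '==' node with threshold '1||2||3' becomes a chain of three '==' nodes,
# testing 1, then 2, then 3 down the right (false) branch. Illustrative
# only; not called by the converter.
def _rule_in_set_sketch():
    toy = {'tree_structure': {
        'decision_type': '==', 'threshold': '1||2||3',
        'default_left': True, 'missing_type': 'None', 'split_feature': 0,
        'left_child': {'leaf_value': 1.0}, 'right_child': {'leaf_value': 0.0}}}
    assert modify_tree_for_rule_in_set(toy, use_float=True) == 2
    root = toy['tree_structure']
    assert root['threshold'] == 1.0
    assert root['right_child']['threshold'] == 2.0
    assert root['right_child']['right_child']['threshold'] == 3.0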
def convert_lgbm_zipmap(scope, operator, container):
zipmap_attrs = {'name': scope.get_unique_operator_name('ZipMap')}
if hasattr(operator, 'classlabels_int64s'):
zipmap_attrs['classlabels_int64s'] = operator.classlabels_int64s
to_type = onnx_proto.TensorProto.INT64
elif hasattr(operator, 'classlabels_strings'):
zipmap_attrs['classlabels_strings'] = operator.classlabels_strings
to_type = onnx_proto.TensorProto.STRING
else:
raise RuntimeError("Unknown class type.")
if to_type == onnx_proto.TensorProto.STRING:
apply_identity(scope, operator.inputs[0].full_name,
operator.outputs[0].full_name, container)
else:
apply_cast(scope, operator.inputs[0].full_name,
operator.outputs[0].full_name, container, to=to_type)
if operator.zipmap:
container.add_node('ZipMap', operator.inputs[1].full_name,
operator.outputs[1].full_name,
op_domain='ai.onnx.ml', **zipmap_attrs)
else:
# This should be apply_identity but optimization fails in
# onnxconverter-common when trying to remove identity nodes.
apply_clip(scope, operator.inputs[1].full_name,
operator.outputs[1].full_name, container,
min=np.array([0], dtype=np.float32),
max=np.array([1], dtype=np.float32))
register_converter('LgbmClassifier', convert_lightgbm)
register_converter('LgbmRegressor', convert_lightgbm)
register_converter('LgbmZipMap', convert_lgbm_zipmap)
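# A minimal end-to-end sketch (an assumption, not part of this module: it
# requires lightgbm and onnxmltools, which drive converters like the ones
# registered above through their own conversion pipeline). Defined but not
# called; names are illustrative.
def _convert_lightgbm_sketch():
    import lightgbm
    from onnxmltools import convert_lightgbm
    from onnxmltools.convert.common.data_types import FloatTensorType
    from onnxmltools.utils import save_model
    X = np.random.rand(100, 4).astype(np.float32)
    y = (X.sum(axis=1) > 2).astype(np.int64)
    model = lightgbm.LGBMClassifier(n_estimators=3).fit(X, y)
    onx = convert_lightgbm(
        model, initial_types=[('input', FloatTensorType([None, 4]))])
    save_model(onx, 'lgbm.onnx')
    return onx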
|
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for managing folders via the Cloud Resource Manager API."""
from google.cloud.exceptions import NotFound
class Folder(object):
"""Folders are containers for your work on Google Cloud Platform.
.. note::
A :class:`Folder` can also be created via
:meth:`Client.new_folder() \
<google.cloud.resource_manager.client.Client.new_folder>`
To manage labels on a :class:`Folder`::
>>> from google.cloud import resource_manager
>>> client = resource_manager.Client()
>>> folder = client.new_folder('purple-spaceship-123')
>>> folder.labels = {'color': 'purple'}
>>> folder.labels['environment'] = 'production'
>>> folder.update()
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders
:type name: str
:param name: The globally unique ID of the folder.
:type client: :class:`google.cloud.resource_manager.client.Client`
:param client: The Client used with this folder.
:type display_name: str
    :param display_name: The display name of the folder.
:type labels: dict
    :param labels: A dictionary of labels associated with the folder.
"""
def __init__(self, client, name=None, display_name=None, parent=None):
self._client = client
self.name = name
self.display_name = display_name
self.status = None
self.parent = parent
def __repr__(self):
return "<Folder: %r (%r)>" % (self.display_name, self.name)
@classmethod
def from_api_repr(cls, resource, client):
"""Factory: construct a folder given its API representation.
:type resource: dict
:param resource: folder resource representation returned from the API
:type client: :class:`google.cloud.resource_manager.client.Client`
:param client: The Client used with this folder.
:rtype: :class:`google.cloud.resource_manager.folder.Folder`
:returns: The folder created.
"""
folder = cls(name=resource["name"], client=client)
folder.set_properties_from_api_repr(resource)
return folder
def set_properties_from_api_repr(self, resource):
"""Update specific properties from its API representation."""
self.name = resource.get("name")
if "parent" in resource:
self.parent = resource["parent"]
if "lifecycleState" in resource:
self.status = resource["lifecycleState"]
@property
def path(self):
"""URL for the folder (ie, ``'/folders/purple-spaceship-123'``)."""
return "/%s" % (self.name)
def _require_client(self, client):
"""Check client or verify over-ride.
:type client: :class:`google.cloud.resource_manager.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current folder.
:rtype: :class:`google.cloud.resource_manager.client.Client`
:returns: The client passed in or the currently bound client.
"""
if client is None:
client = self._client
return client
def create(self, client=None):
"""API call: create the folder via a ``POST`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/create
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
"""
client = self._require_client(client)
data = {"name": self.name, "displayName": self.display_name}
query_params = {"parent": self.parent}
resp = client._connection_v2.api_request(
method="POST", path="/folders", data=data, query_params=query_params
)
self.set_properties_from_api_repr(resp)
return resp
def reload(self, client=None):
"""API call: reload the folder via a ``GET`` request.
This method will reload the newest metadata for the folder. If you've
created a new :class:`Folder` instance via
:meth:`Client.new_folder() \
<google.cloud.resource_manager.client.Client.new_folder>`,
this method will retrieve folder metadata.
.. warning::
This will overwrite any local changes you've made and not saved
via :meth:`update`.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/get
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
"""
client = self._require_client(client)
# We assume the folder exists. If it doesn't it will raise a NotFound
# exception.
resp = client._connection_v2.api_request(method="GET", path=self.path)
self.set_properties_from_api_repr(resp)
def exists(self, client=None):
"""API call: test the existence of a folder via a ``GET`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/get
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
:rtype: bool
:returns: Boolean indicating existence of the folder.
"""
client = self._require_client(client)
try:
            # Note that we have to request the entire resource as the API
            # doesn't provide a way to check for existence only.
client._connection_v2.api_request(method="GET", path=self.path)
except NotFound:
return False
else:
return True
def update(self, client=None):
"""API call: update the folder via a ``PUT`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/update
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
"""
client = self._require_client(client)
data = {"display_name": self.display_name, "parent": self.parent}
resp = client._connection_v2.api_request(method="PUT", path=self.path, data=data)
self.set_properties_from_api_repr(resp)
def delete(self, client=None, reload_data=False):
"""API call: delete the folder via a ``DELETE`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/delete
This actually changes the status (``lifecycleState``) from ``ACTIVE``
to ``DELETE_REQUESTED``.
Later (it's not specified when), the folder will move into the
``DELETE_IN_PROGRESS`` state, which means the deleting has actually
begun.
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
:type reload_data: bool
:param reload_data: Whether to reload the folder with the latest
state. If you want to get the updated status,
you'll want this set to :data:`True` as the DELETE
method doesn't send back the updated folder.
Default: :data:`False`.
"""
client = self._require_client(client)
client._connection_v2.api_request(method="DELETE", path=self.path)
# If the reload flag is set, reload the folder.
if reload_data:
self.reload()
def undelete(self, client=None, reload_data=False):
"""API call: undelete the folder via a ``POST`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/undelete
This actually changes the folder status (``lifecycleState``) from
``DELETE_REQUESTED`` to ``ACTIVE``.
If the folder has already reached a status of ``DELETE_IN_PROGRESS``,
this request will fail and the folder cannot be restored.
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
:type reload_data: bool
:param reload_data: Whether to reload the folder with the latest
state. If you want to get the updated status,
you'll want this set to :data:`True` as the DELETE
method doesn't send back the updated folder.
Default: :data:`False`.
"""
client = self._require_client(client)
client._connection_v2.api_request(method="POST", path=self.path + ":undelete")
# If the reload flag is set, reload the folder.
if reload_data:
self.reload()
def get_folder(self, client=None, name=None):
"""API call: get the folder via a GET method
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/create
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
"""
client = self._require_client(client)
resp = client._connection_v2.api_request(
method="GET", path="/" + name
)
self.set_properties_from_api_repr(resp)
return resp
def get_iam_folder(self, client=None, name=None):
"""API call: get the iam policy folder via a POST method
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/folders/create
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current folder.
"""
client = self._require_client(client)
resp = client._connection_v2.api_request(
method="POST", path="/" + name + "/:getIamPolicy"
)
self.set_properties_from_api_repr(resp)
return resp
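# A minimal usage sketch (hypothetical names; requires valid credentials and
# a client exposing ``_connection_v2``, as this class assumes throughout).
# Defined but not called.
def _folder_sketch():
    from google.cloud import resource_manager
    client = resource_manager.Client()
    folder = Folder(client, name='folders/1234',
                    display_name='purple-spaceship', parent='organizations/56')
    if not folder.exists():
        folder.create()
    folder.reload()
    return folder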
|
from __future__ import absolute_import, division, print_function
import ast
from itertools import repeat
from toolz import merge
from . import arithmetic
from . import math
from .expressions import Expr, Symbol
__all__ = ['exprify']
def generate_methods(node_names, funcs, builder):
def wrapped(cls):
for node_name, func in zip(node_names, funcs):
setattr(cls, 'visit_%s' % node_name, builder(func))
return cls
return wrapped
arithmetic_ops = ['Eq', 'Ne', 'Lt', 'Gt', 'Le', 'Ge', 'BitAnd', 'BitOr',
'Invert', 'USub', 'Add', 'Mult', 'Div', 'FloorDiv', 'Pow', 'Mod',
'Sub']
@generate_methods(arithmetic_ops, arithmetic_ops,
builder=lambda func: lambda self, node: getattr(arithmetic, func))
class BlazeParser(ast.NodeVisitor):
def __init__(self, dtypes, scope):
self.dtypes = dtypes
self.scope = scope
def visit_Compare(self, node):
assert len(node.ops) == 1, 'chained comparisons not supported'
assert len(node.comparators) == 1, 'chained comparisons not supported'
return self.visit(node.ops[0])(self.visit(node.left),
self.visit(node.comparators[0]))
def visit_Num(self, node):
return node.n
def visit_Str(self, node):
return node.s
def visit_Name(self, node):
name = node.id
if name.startswith('__'):
raise ValueError("invalid name %r" % name)
try:
return self.scope[name]
except KeyError:
return Symbol(name, self.dtypes[name])
def visit_BinOp(self, node):
return self.visit(node.op)(self.visit(node.left),
self.visit(node.right))
def visit_UnaryOp(self, node):
op = node.op
operand = node.operand
        # Fold unary minus applied to a numeric literal; other unary
        # operators go through their registered arithmetic visitors.
        if isinstance(operand, ast.Num) and isinstance(op, ast.USub):
            return -operand.n
        return self.visit(op)(self.visit(operand))
def visit_Call(self, node):
assert len(node.args) <= 1, 'only single argument functions allowed'
assert not node.keywords
        # ast.Call lost the starargs/kwargs attributes in Python 3.5+
        assert getattr(node, 'starargs', None) is None, 'starargs not allowed'
        assert getattr(node, 'kwargs', None) is None, 'kwargs not allowed'
return self.visit(node.func)(*map(self.visit, node.args))
def visit(self, node):
name = node.__class__.__name__
method = 'visit_' + name
visitor = getattr(self, method, None)
if visitor is None:
raise NotImplementedError('%s nodes are not implemented' % name)
return visitor(node)
# Operations like sin, cos, exp, isnan, floor, ceil, ...
math_operators = dict((k, v) for k, v in math.__dict__.items()
if isinstance(v, type) and issubclass(v, Expr))
safe_scope = {'__builtins__': {}, # Python 2
'builtins': {}} # Python 3
def exprify(expr, dtypes):
""" Transform string into scalar expression
>>> from blaze.expr import Expr
>>> expr = exprify('x + y', {'x': 'int64', 'y': 'real'})
>>> expr
x + y
>>> isinstance(expr, Expr)
True
>>> expr.lhs.dshape
dshape("int64")
"""
scope = merge(safe_scope, math_operators)
# use eval mode to raise a SyntaxError if any statements are passed in
parsed = ast.parse(expr, mode='eval')
overlapping_names = set(dtypes) & set(scope)
if overlapping_names:
raise ValueError('overlapping names %s' % overlapping_names)
parser = BlazeParser(dtypes, scope)
return parser.visit(parsed.body)
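# A small sketch of the guardrails above (illustrative only; defined but not
# called): unknown names become typed Symbols, and dunder names are rejected
# outright by visit_Name.
def _exprify_sketch():
    expr = exprify('x + y * 2', {'x': 'int64', 'y': 'float64'})
    try:
        exprify('__import__', {})
    except ValueError:
        pass  # names starting with '__' are refused
    return expr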
|
#!/usr/bin/env python3
# coding: utf-8
__author__ = 'cleardusk'
import numpy as np
from math import sqrt
import scipy.io as sio
import matplotlib.pyplot as plt
from .ddfa import reconstruct_vertex
def get_suffix(filename):
"""a.jpg -> jpg"""
pos = filename.rfind('.')
if pos == -1:
return ''
return filename[pos:]
def crop_img(img, roi_box):
h, w = img.shape[:2]
sx, sy, ex, ey = [int(round(_)) for _ in roi_box]
dh, dw = ey - sy, ex - sx
if len(img.shape) == 3:
res = np.zeros((dh, dw, 3), dtype=np.uint8)
else:
res = np.zeros((dh, dw), dtype=np.uint8)
if sx < 0:
sx, dsx = 0, -sx
else:
dsx = 0
if ex > w:
ex, dex = w, dw - (ex - w)
else:
dex = dw
if sy < 0:
sy, dsy = 0, -sy
else:
dsy = 0
if ey > h:
ey, dey = h, dh - (ey - h)
else:
dey = dh
res[dsy:dey, dsx:dex] = img[sy:ey, sx:ex]
return res
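# Illustrative sketch (hypothetical helper, never called at import time):
# crop_img pads out-of-bounds regions with zeros instead of raising.
def _demo_crop_img():
    img = np.zeros((100, 100, 3), dtype=np.uint8)
    patch = crop_img(img, [-10.0, -10.0, 50.0, 50.0])
    assert patch.shape == (60, 60, 3)  # 60x60 patch, zero-padded on the top-left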
def calc_hypotenuse(pts):
bbox = [min(pts[0, :]), min(pts[1, :]), max(pts[0, :]), max(pts[1, :])]
center = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
radius = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2
bbox = [center[0] - radius, center[1] - radius, center[0] + radius, center[1] + radius]
llength = sqrt((bbox[2] - bbox[0]) ** 2 + (bbox[3] - bbox[1]) ** 2)
return llength / 3
def parse_roi_box_from_landmark(pts):
"""calc roi box from landmark"""
bbox = [min(pts[0, :]), min(pts[1, :]), max(pts[0, :]), max(pts[1, :])]
center = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
radius = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2
bbox = [center[0] - radius, center[1] - radius, center[0] + radius, center[1] + radius]
llength = sqrt((bbox[2] - bbox[0]) ** 2 + (bbox[3] - bbox[1]) ** 2)
center_x = (bbox[2] + bbox[0]) / 2
center_y = (bbox[3] + bbox[1]) / 2
roi_box = [0] * 4
roi_box[0] = center_x - llength / 2
roi_box[1] = center_y - llength / 2
roi_box[2] = roi_box[0] + llength
roi_box[3] = roi_box[1] + llength
return roi_box
def parse_roi_box_from_bbox(bbox):
left, top, right, bottom = bbox
old_size = (right - left + bottom - top) / 2
center_x = right - (right - left) / 2.0
center_y = bottom - (bottom - top) / 2.0 + old_size * 0.14
size = int(old_size * 1.58)
roi_box = [0] * 4
roi_box[0] = center_x - size / 2
roi_box[1] = center_y - size / 2
roi_box[2] = roi_box[0] + size
roi_box[3] = roi_box[1] + size
return roi_box
def dump_to_ply(vertex, tri, wfp):
header = """ply
format ascii 1.0
element vertex {}
property float x
property float y
property float z
element face {}
property list uchar int vertex_indices
end_header"""
n_vertex = vertex.shape[1]
n_face = tri.shape[1]
header = header.format(n_vertex, n_face)
with open(wfp, 'w') as f:
f.write(header + '\n')
for i in range(n_vertex):
x, y, z = vertex[:, i]
f.write('{:.4f} {:.4f} {:.4f}\n'.format(x, y, z))
for i in range(n_face):
idx1, idx2, idx3 = tri[:, i]
f.write('3 {} {} {}\n'.format(idx1 - 1, idx2 - 1, idx3 - 1))
    print('Dump to {}'.format(wfp))
def dump_vertex(vertex, wfp):
sio.savemat(wfp, {'vertex': vertex})
print('Dump to {}'.format(wfp))
def _predict_vertices(param, roi_bbox, dense, transform=True):
vertex = reconstruct_vertex(param, dense=dense)
sx, sy, ex, ey = roi_bbox
scale_x = (ex - sx) / 120
scale_y = (ey - sy) / 120
vertex[0, :] = vertex[0, :] * scale_x + sx
vertex[1, :] = vertex[1, :] * scale_y + sy
s = (scale_x + scale_y) / 2
vertex[2, :] *= s
return vertex
def predict_68pts(param, roi_box):
return _predict_vertices(param, roi_box, dense=False)
def predict_dense(param, roi_box):
return _predict_vertices(param, roi_box, dense=True)
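# Note (illustrative assumption): the 120 in _predict_vertices appears to be
# the side length, in pixels, of the model's standard input crop; the
# reconstructed vertices are rescaled from that 120x120 space back into the
# original ROI.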
def draw_landmarks(img, pts, style='fancy', wfp=None, show_flg=False, **kwargs):
"""Draw landmarks using matplotlib"""
height, width = img.shape[:2]
plt.figure(figsize=(12, height / width * 12))
plt.imshow(img[:, :, ::-1])
plt.subplots_adjust(left=0, right=1, top=1, bottom=0)
plt.axis('off')
    if not isinstance(pts, (tuple, list)):
pts = [pts]
for i in range(len(pts)):
if style == 'simple':
plt.plot(pts[i][0, :], pts[i][1, :], 'o', markersize=4, color='g')
elif style == 'fancy':
alpha = 0.8
markersize = 4
lw = 1.5
color = kwargs.get('color', 'w')
markeredgecolor = kwargs.get('markeredgecolor', 'black')
nums = [0, 17, 22, 27, 31, 36, 42, 48, 60, 68]
# close eyes and mouths
plot_close = lambda i1, i2: plt.plot([pts[i][0, i1], pts[i][0, i2]], [pts[i][1, i1], pts[i][1, i2]],
color=color, lw=lw, alpha=alpha - 0.1)
plot_close(41, 36)
plot_close(47, 42)
plot_close(59, 48)
plot_close(67, 60)
for ind in range(len(nums) - 1):
l, r = nums[ind], nums[ind + 1]
plt.plot(pts[i][0, l:r], pts[i][1, l:r], color=color, lw=lw, alpha=alpha - 0.1)
plt.plot(pts[i][0, l:r], pts[i][1, l:r], marker='o', linestyle='None', markersize=markersize,
color=color,
markeredgecolor=markeredgecolor, alpha=alpha)
if wfp is not None:
plt.savefig(wfp, dpi=200)
print('Save visualization result to {}'.format(wfp))
if show_flg:
plt.show()
def get_colors(image, vertices):
[h, w, _] = image.shape
vertices[0, :] = np.minimum(np.maximum(vertices[0, :], 0), w - 1) # x
vertices[1, :] = np.minimum(np.maximum(vertices[1, :], 0), h - 1) # y
ind = np.round(vertices).astype(np.int32)
colors = image[ind[1, :], ind[0, :], :] # n x 3
return colors
def write_obj_with_colors(obj_name, vertices, triangles, colors):
triangles = triangles.copy() # meshlab start with 1
if obj_name.split('.')[-1] != 'obj':
obj_name = obj_name + '.obj'
# write obj
with open(obj_name, 'w') as f:
# write vertices & colors
for i in range(vertices.shape[1]):
s = 'v {:.4f} {:.4f} {:.4f} {} {} {}\n'.format(vertices[1, i], vertices[0, i], vertices[2, i], colors[i, 2],
colors[i, 1], colors[i, 0])
f.write(s)
# write f: ver ind/ uv ind
for i in range(triangles.shape[1]):
s = 'f {} {} {}\n'.format(triangles[0, i], triangles[1, i], triangles[2, i])
f.write(s)
def main():
pass
if __name__ == '__main__':
main()
|
import kanp
from kwmo.lib.kwmo_kcd_client import KcdClient
from pylons import config
from kwmo.lib.config import get_cached_kcd_external_conf_object
from kwmo.model.kcd.kcd_user import KcdUser
#KANP_EMAIL_NOTIF_FLAG = 1
#KANP_EMAIL_SUMMARY_FLAG = 2
from kflags import Flags
class UserWorkspaceSettings:
def __init__(self, user_id, workspace_id):
self._user_id = int(user_id)
self._workspace_id = int(workspace_id)
        # initialize to unloaded
        self._loaded = False
        self._notif_policy = 0
        self._new_notif_policy = 0
    def save(self):
        kc = KcdClient(get_cached_kcd_external_conf_object())
        # TODO: check the status code from KCD and handle errors
        kc.save_notification_policy(self._workspace_id, self._user_id, self._new_notif_policy)
        self._notif_policy = self._new_notif_policy
    def load(self):
        if not self._loaded:
            # TODO: bullet-proof this code; assert kcd_user and return an error code if it is None
            kcd_user = KcdUser.get_by(user_id=self._user_id, kws_id=self._workspace_id)
            self._notif_policy = kcd_user.notif_policy
            self._loaded = True
    def setNotificationsEnabled(self, value):
        # Set or clear the flag; the original only ever set it, so passing
        # False could never disable notifications.
        if value:
            self._new_notif_policy |= kanp.KANP_EMAIL_NOTIF_FLAG
        else:
            self._new_notif_policy &= ~kanp.KANP_EMAIL_NOTIF_FLAG
    def setSummaryEnabled(self, value):
        if value:
            self._new_notif_policy |= kanp.KANP_EMAIL_SUMMARY_FLAG
        else:
            self._new_notif_policy &= ~kanp.KANP_EMAIL_SUMMARY_FLAG
def getNotificationsEnabled(self):
self.load()
return (self._notif_policy & kanp.KANP_EMAIL_NOTIF_FLAG) == kanp.KANP_EMAIL_NOTIF_FLAG
def getSummaryEnabled(self):
self.load()
return (self._notif_policy & kanp.KANP_EMAIL_SUMMARY_FLAG) == kanp.KANP_EMAIL_SUMMARY_FLAG
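# Illustrative sketch (hypothetical IDs; requires a live KCD backend):
#
#     settings = UserWorkspaceSettings(user_id=42, workspace_id=7)
#     settings.setNotificationsEnabled(True)
#     settings.setSummaryEnabled(False)
#     settings.save()
#     assert settings.getNotificationsEnabled()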
|
from flask import render_template
from flask import Flask,request
import logging
logging.basicConfig(level=logging.INFO)  # the root logger defaults to WARNING, which would drop the info() calls below
app = Flask(__name__)
@app.route('/')
def index():
logging.info('>>>>>>>>>>>>>')
ip = request.remote_addr
print(ip)
logging.info(ip)
logging.info('<<<<<<<<<<<<<')
    return render_template("index.html")
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True, port=80)
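# Note (illustrative): behind a reverse proxy, request.remote_addr reports the
# proxy's address rather than the client's. Werkzeug's ProxyFix middleware can
# restore the client IP from X-Forwarded-For, e.g.:
#
#     from werkzeug.middleware.proxy_fix import ProxyFix
#     app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1)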
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from abc import abstractmethod
from collections import defaultdict
from pants.base.exceptions import TaskError
from pants.task.task import Task
class MutexTaskMixin(Task):
"""A mixin that can be subclassed to form a mutual exclusion group of tasks.
Generally, you'd subclass MutexTaskMixin and override `mutex_base` to return the (abstract) type
of your mutual exclusion group tasks, for example::
class LogViewerTaskMixin(MutexTaskMixin):
'''Pops up an interactive log viewing console.
Log viewers pop up their console for binary targets they know how to execute and scrape
logs from.
'''
@classmethod
def mutex_base(cls):
return LogViewerTaskMixin
Then all tasks that implemented an interactive log viewer would mix in LogViewerTaskMixin and
provide concrete implementations for `select_targets` that pick out the binary targets they know
how to handle and `execute_for` to execute those binaries and scrape their logs.
Assuming all these tasks were registered under the `logview` goal then each task could be assured
it would be executed to the exclusion of all other LogViewerTaskMixins in any
`./pants logview ...` run.
"""
class NoActivationsError(TaskError):
"""Indicates a mutexed task group had no tasks run."""
class IncompatibleActivationsError(TaskError):
"""Indicates a mutexed task group had more than one task eligible to run."""
_implementations = defaultdict(set)
@classmethod
def reset_implementations(cls):
"""Resets all mutex implementation registrations.
Only intended for testing.
"""
cls._implementations.clear()
@classmethod
def mutex_base(cls):
"""Returns the root class in a mutex group.
Members of the group will all mix in this class and it should implement this method concretely
to return itself.
"""
raise NotImplementedError()
@classmethod
def prepare(cls, options, round_manager):
super(MutexTaskMixin, cls).prepare(options, round_manager)
cls._implementations[cls.mutex_base()].add(cls)
@classmethod
def select_targets(cls, target):
"""Returns `True` if the given target is operated on by this mutex group member."""
raise NotImplementedError()
@classmethod
def _selected_by_other_impl(cls, target):
for impl in cls._implementations[cls.mutex_base()]:
if impl != cls and impl.select_targets(target):
return True
return False
@abstractmethod
def execute_for(self, targets):
"""Executes the current mutex member with its selected targets.
    When this method is called, it's an indication that the current mutex member is the only
    member active in this pants run.
:param targets: All the targets reachable in this run selected by this mutex member's
`select_targets` method.
"""
def execute(self):
targets = self._require_homogeneous_targets(self.select_targets, self._selected_by_other_impl)
if targets:
return self.execute_for(targets)
# Else a single other mutex impl is executing.
def _require_homogeneous_targets(self, accept_predicate, reject_predicate):
"""Ensures that there is no ambiguity in the context according to the given predicates.
If any targets in the context satisfy the accept_predicate, and no targets satisfy the
reject_predicate, returns the accepted targets.
If no targets satisfy the accept_predicate, returns None.
Otherwise throws TaskError.
"""
if len(self.context.target_roots) == 0:
raise self.NoActivationsError('No target specified.')
accepted = self.context.targets(accept_predicate)
rejected = self.context.targets(reject_predicate)
if len(accepted) == 0:
# no targets were accepted, regardless of rejects
return None
elif len(rejected) == 0:
# we have at least one accepted target, and no rejected targets
return accepted
else:
# both accepted and rejected targets
# TODO: once https://github.com/pantsbuild/pants/issues/425 lands, we should add
# language-specific flags that would resolve the ambiguity here
def render_target(target):
return '{} (a {})'.format(target.address.reference(), target.type_alias)
raise self.IncompatibleActivationsError('Mutually incompatible targets specified: {} vs {} '
'(and {} others)'
.format(render_target(accepted[0]),
render_target(rejected[0]),
len(accepted) + len(rejected) - 2))
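# Illustrative sketch (hypothetical task; assumes the LogViewerTaskMixin from
# the class docstring above and a JvmBinary target type): a concrete member of
# a mutex group might look like
#
#     class JvmLogViewer(LogViewerTaskMixin):
#         @classmethod
#         def select_targets(cls, target):
#             return isinstance(target, JvmBinary)
#
#         def execute_for(self, targets):
#             for target in targets:
#                 self.context.log.info('viewing logs for {}'.format(target.address))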
|
# -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gevent
import zerorpc
from testutils import random_ipc_endpoint
class EchoModule(object):
def __init__(self, trigger=None):
self.last_msg = None
self._trigger = trigger
def echo(self, msg):
self.last_msg = "echo: " + msg
if self._trigger:
self._trigger.set()
return self.last_msg
@zerorpc.stream
def echoes(self, msg):
self.last_msg = "echo: " + msg
        for i in range(0, 3):
yield self.last_msg
def crash(self, msg):
try:
self.last_msg = "raise: " + msg
raise RuntimeError("BrokenEchoModule")
finally:
if self._trigger:
self._trigger.set()
@zerorpc.stream
def echoes_crash(self, msg):
self.crash(msg)
def timeout(self, msg):
self.last_msg = "timeout: " + msg
gevent.sleep(2)
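# Illustrative sketch (not part of the original tests): zerorpc invokes these
# hooks by name on any object registered via Context.register_middleware, and
# hooks a middleware does not define are simply skipped. A hypothetical
# middleware combining the client hooks exercised below:
class LoggingClientMiddleware(object):
    def __init__(self):
        self.calls = []
    def client_before_request(self, event):
        self.calls.append(('before', event.name))
    def client_after_request(self, req_event, rep_event, exception):
        self.calls.append(('after', req_event.name, exception))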
def test_hook_client_before_request():
class ClientBeforeRequestMiddleware(object):
def __init__(self):
self.called = False
def client_before_request(self, event):
self.called = True
self.method = event.name
zero_ctx = zerorpc.Context()
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
assert test_client.echo("test") == "echo: test"
test_middleware = ClientBeforeRequestMiddleware()
zero_ctx.register_middleware(test_middleware)
assert test_middleware.called == False
assert test_client.echo("test") == "echo: test"
assert test_middleware.called == True
assert test_middleware.method == 'echo'
test_server.stop()
test_server_task.join()
class ClientAfterRequestMiddleware(object):
def __init__(self):
self.called = False
def client_after_request(self, req_event, rep_event, exception):
self.called = True
assert req_event is not None
assert req_event.name == "echo" or req_event.name == "echoes"
self.retcode = rep_event.name
assert exception is None
def test_hook_client_after_request():
zero_ctx = zerorpc.Context()
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
assert test_client.echo("test") == "echo: test"
test_middleware = ClientAfterRequestMiddleware()
zero_ctx.register_middleware(test_middleware)
assert test_middleware.called == False
assert test_client.echo("test") == "echo: test"
assert test_middleware.called == True
assert test_middleware.retcode == 'OK'
test_server.stop()
test_server_task.join()
def test_hook_client_after_request_stream():
zero_ctx = zerorpc.Context()
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
it = test_client.echoes("test")
assert next(it) == "echo: test"
for echo in it:
assert echo == "echo: test"
test_middleware = ClientAfterRequestMiddleware()
zero_ctx.register_middleware(test_middleware)
assert test_middleware.called == False
it = test_client.echoes("test")
assert next(it) == "echo: test"
assert test_middleware.called == False
for echo in it:
assert echo == "echo: test"
assert test_middleware.called == True
assert test_middleware.retcode == 'STREAM_DONE'
test_server.stop()
test_server_task.join()
def test_hook_client_after_request_timeout():
class ClientAfterRequestMiddleware(object):
def __init__(self):
self.called = False
def client_after_request(self, req_event, rep_event, exception):
self.called = True
assert req_event is not None
assert req_event.name == "timeout"
assert rep_event is None
zero_ctx = zerorpc.Context()
test_middleware = ClientAfterRequestMiddleware()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(timeout=1, context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.timeout("test")
except zerorpc.TimeoutExpired as ex:
assert test_middleware.called == True
assert "timeout" in ex.args[0]
test_server.stop()
test_server_task.join()
class ClientAfterFailedRequestMiddleware(object):
def __init__(self):
self.called = False
def client_after_request(self, req_event, rep_event, exception):
assert req_event is not None
assert req_event.name == "crash" or req_event.name == "echoes_crash"
self.called = True
assert isinstance(exception, zerorpc.RemoteError)
assert exception.name == 'RuntimeError'
assert 'BrokenEchoModule' in exception.msg
assert rep_event.name == 'ERR'
def test_hook_client_after_request_remote_error():
zero_ctx = zerorpc.Context()
test_middleware = ClientAfterFailedRequestMiddleware()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(timeout=1, context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.crash("test")
except zerorpc.RemoteError:
assert test_middleware.called == True
test_server.stop()
test_server_task.join()
def test_hook_client_after_request_remote_error_stream():
zero_ctx = zerorpc.Context()
test_middleware = ClientAfterFailedRequestMiddleware()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(timeout=1, context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.echoes_crash("test")
except zerorpc.RemoteError:
assert test_middleware.called == True
test_server.stop()
test_server_task.join()
def test_hook_client_handle_remote_error_inspect():
class ClientHandleRemoteErrorMiddleware(object):
def __init__(self):
self.called = False
def client_handle_remote_error(self, event):
self.called = True
test_middleware = ClientHandleRemoteErrorMiddleware()
zero_ctx = zerorpc.Context()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.crash("test")
except zerorpc.RemoteError as ex:
assert test_middleware.called == True
assert ex.name == "RuntimeError"
test_server.stop()
test_server_task.join()
# This is a seriously broken idea, but possible nonetheless
class ClientEvalRemoteErrorMiddleware(object):
def __init__(self):
self.called = False
def client_handle_remote_error(self, event):
self.called = True
name, msg, tb = event.args
etype = eval(name)
e = etype(tb)
return e
def test_hook_client_handle_remote_error_eval():
test_middleware = ClientEvalRemoteErrorMiddleware()
zero_ctx = zerorpc.Context()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.crash("test")
except RuntimeError as ex:
assert test_middleware.called == True
assert "BrokenEchoModule" in ex.args[0]
test_server.stop()
test_server_task.join()
def test_hook_client_handle_remote_error_eval_stream():
test_middleware = ClientEvalRemoteErrorMiddleware()
zero_ctx = zerorpc.Context()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.echoes_crash("test")
except RuntimeError as ex:
assert test_middleware.called == True
assert "BrokenEchoModule" in ex.args[0]
test_server.stop()
test_server_task.join()
def test_hook_client_after_request_custom_error():
# This is a seriously broken idea, but possible nonetheless
class ClientEvalInspectRemoteErrorMiddleware(object):
def __init__(self):
self.called = False
def client_handle_remote_error(self, event):
name, msg, tb = event.args
etype = eval(name)
e = etype(tb)
return e
def client_after_request(self, req_event, rep_event, exception):
assert req_event is not None
assert req_event.name == "crash"
self.called = True
assert isinstance(exception, RuntimeError)
test_middleware = ClientEvalInspectRemoteErrorMiddleware()
zero_ctx = zerorpc.Context()
zero_ctx.register_middleware(test_middleware)
endpoint = random_ipc_endpoint()
test_server = zerorpc.Server(EchoModule(), context=zero_ctx)
test_server.bind(endpoint)
test_server_task = gevent.spawn(test_server.run)
test_client = zerorpc.Client(context=zero_ctx)
test_client.connect(endpoint)
assert test_middleware.called == False
try:
test_client.crash("test")
except RuntimeError as ex:
assert test_middleware.called == True
assert "BrokenEchoModule" in ex.args[0]
test_server.stop()
test_server_task.join()
|
from sys import argv
from bs4 import BeautifulSoup
import requests
import datetime
url = 'http://www.njtransit.com/sf/sf_servlet.srv?hdnPageAction=TrainSchedulesFrom'
pu_code = "124_PRIN"
ny_code = "105_BNTN"
prs = "Princeton"
nyp = "New York Penn Station"
# get date
today = datetime.date.today()
str_date = today.strftime("%m/%d/%Y")
# trip info
toNY_dict = {'selOrigin': pu_code, 'selDestination': ny_code, 'datepicker': str_date, 'OriginDescription': prs, 'DestDescription': nyp}
toPU_dict = {'selOrigin': ny_code, 'selDestination': pu_code, 'datepicker': str_date, 'OriginDescription': nyp, 'DestDescription': prs}
# get to webpage with data for the day
with requests.Session() as session:
    toNY = session.post(url, data=toNY_dict)
    toPU = session.post(url, data=toPU_dict)
toPUhtml = toPU.text
toNYhtml = toNY.text
# Reads HTML and a destination name; writes a CSV file of comma-separated train information
def scrape(html, destination):
    title = str(today) + str(destination)
    soup = BeautifulSoup(html, 'html.parser')
# Improvements: instead of being so hacky with 10 search for td
# Gather all lines in table
table1 = soup.find_all("tr")
table2 = table1[10] #table1[10] contains the table of interest
table3 = table2.find_all('span')
# Create 7 lists
origin = [] #Times for departure at origin
origintrain = []
transferarrive = [] #Times for arrival at transfer
transferdepart = [] #Time for departure at transfer
transfertrain = [] #Train or bus number
    dest_arrival = []  # Time of arrival at destination (renamed so it does not shadow the `destination` parameter)
total = [] #Total time of Travel
#Create 3 Columns of Text File
origin.append("Origin Departure") #Times for departure at origin
origintrain.append("Origin Train")
transferarrive.append("Transfer Arrival") #Times for arrival at transfer
transferdepart.append("Transfer Departure") #Time for departure at transfer
transfertrain.append("Transfer Train or Bus")
    dest_arrival.append("Destination Arrival")  # Time of arrival at destination
total.append("Total Travel Time") #Total time of Travel
    # Store the columns by slicing the relevant spans of text
    for i in range(4, len(table3) - 3, 4):
        origin.append(str(table3[i].text)[0:8])
        origintrain.append(str(table3[i].text)[-5:])
        transferarrive.append(str(table3[i + 1].text)[7:15])
        transferdepart.append(str(table3[i + 1].text)[39:48])
        transfertrain.append(str(table3[i + 1].text)[-5:])
        dest_arrival.append(str(table3[i + 2].text))
        total.append(str(table3[i + 3].text))
    Dict = {'origin': origin[1:], 'transferarrive': transferarrive[1:], 'transferdepart': transferdepart[1:], 'destination': dest_arrival[1:]}
    # Write each row to the CSV file
    with open(str(title) + ".csv", "w") as text_file:
        for lines in range(len(origin)):
            text_file.write(origin[lines] + "," + origintrain[lines] + "," + transferarrive[lines] + "," + transferdepart[lines] + "," + transfertrain[lines] + "," + dest_arrival[lines] + "\n")
    return Dict
#Create csv files for to Princeton and to New York
scrape(toPUhtml, 'PU')
scrape(toNYhtml, 'NY')
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2013 NTT MCL Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import View
from horizon import exceptions
from horizon import tabs
from horizon.utils.lazy_encoder import LazyTranslationEncoder
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.network_topology import forms
from openstack_dashboard.dashboards.project.network_topology.instances \
import tables as instances_tables
from openstack_dashboard.dashboards.project.network_topology.networks \
import tables as networks_tables
from openstack_dashboard.dashboards.project.network_topology.ports \
import tables as ports_tables
from openstack_dashboard.dashboards.project.network_topology.routers \
import tables as routers_tables
from openstack_dashboard.dashboards.project.network_topology.subnets \
import tables as subnets_tables
from openstack_dashboard.dashboards.project.network_topology \
import tabs as topology_tabs
from openstack_dashboard.dashboards.project.network_topology import utils
from openstack_dashboard.dashboards.project.instances import\
console as i_console
from openstack_dashboard.dashboards.project.instances.tables import \
STATUS_DISPLAY_CHOICES as instance_choices
from openstack_dashboard.dashboards.project.instances import\
views as i_views
from openstack_dashboard.dashboards.project.instances.workflows import\
create_instance as i_workflows
from openstack_dashboard.dashboards.project.networks.subnets import\
views as s_views
from openstack_dashboard.dashboards.project.networks.subnets import\
workflows as s_workflows
from openstack_dashboard.dashboards.project.networks.tables import \
DISPLAY_CHOICES as network_display_choices
from openstack_dashboard.dashboards.project.networks.tables import \
STATUS_DISPLAY_CHOICES as network_choices
from openstack_dashboard.dashboards.project.networks import\
views as n_views
from openstack_dashboard.dashboards.project.networks import\
workflows as n_workflows
from openstack_dashboard.dashboards.project.routers.ports.tables import \
DISPLAY_CHOICES as ports_choices
from openstack_dashboard.dashboards.project.routers.ports.tables import \
STATUS_DISPLAY_CHOICES as ports_status_choices
from openstack_dashboard.dashboards.project.routers.ports import\
views as p_views
from openstack_dashboard.dashboards.project.routers.tables import \
ADMIN_STATE_DISPLAY_CHOICES as routers_admin_choices
from openstack_dashboard.dashboards.project.routers.tables import \
STATUS_DISPLAY_CHOICES as routers_status_choices
from openstack_dashboard.dashboards.project.routers import\
views as r_views
# List of known server statuses that won't connect to the console
console_invalid_status = {
'shutoff', 'suspended', 'resize', 'verify_resize',
'revert_resize', 'migrating', 'build', 'shelved',
'shelved_offloaded'}
class TranslationHelper(object):
"""Helper class to provide the translations.
This allows the network topology to access the translated strings
for various resources defined in other parts of the code.
"""
def __init__(self):
# turn translation tuples into dicts for easy access
self.instance = dict(instance_choices)
self.network = dict(network_choices)
self.network.update(dict(network_display_choices))
self.router = dict(routers_admin_choices)
self.router.update(dict(routers_status_choices))
self.port = dict(ports_choices)
self.port.update(dict(ports_status_choices))
# and turn all the keys into Uppercase for simple access
self.instance = {k.upper(): v for k, v in self.instance.items()}
self.network = {k.upper(): v for k, v in self.network.items()}
self.router = {k.upper(): v for k, v in self.router.items()}
self.port = {k.upper(): v for k, v in self.port.items()}
class NTAddInterfaceView(p_views.AddInterfaceView):
success_url = "horizon:project:network_topology:index"
failure_url = "horizon:project:network_topology:index"
def get_success_url(self):
return reverse("horizon:project:network_topology:index")
def get_context_data(self, **kwargs):
context = super(NTAddInterfaceView, self).get_context_data(**kwargs)
context['form_url'] = 'horizon:project:network_topology:interface'
return context
class NTCreateRouterView(r_views.CreateView):
form_class = forms.NTCreateRouterForm
success_url = reverse_lazy("horizon:project:network_topology:index")
submit_url = reverse_lazy("horizon:project:network_topology:createrouter")
page_title = _("Create a Router")
class NTCreateNetwork(n_workflows.CreateNetwork):
def get_success_url(self):
return reverse("horizon:project:network_topology:index")
def get_failure_url(self):
return reverse("horizon:project:network_topology:index")
class NTCreateNetworkView(n_views.CreateView):
workflow_class = NTCreateNetwork
class NTLaunchInstance(i_workflows.LaunchInstance):
success_url = "horizon:project:network_topology:index"
class NTLaunchInstanceView(i_views.LaunchInstanceView):
workflow_class = NTLaunchInstance
class NTCreateSubnet(s_workflows.CreateSubnet):
def get_success_url(self):
return reverse("horizon:project:network_topology:index")
def get_failure_url(self):
return reverse("horizon:project:network_topology:index")
class NTCreateSubnetView(s_views.CreateView):
workflow_class = NTCreateSubnet
class InstanceView(i_views.IndexView):
table_class = instances_tables.InstancesTable
template_name = 'project/network_topology/iframe.html'
def get_data(self):
self._more = False
# Get instance by id, return a list of one instance
# If failed to retrieve the instance, return an empty list
try:
instance_id = self.request.GET.get("id", "")
instance = api.nova.server_get(self.request, instance_id)
return [instance]
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve the instance.'))
return []
class RouterView(r_views.IndexView):
table_class = routers_tables.RoutersTable
template_name = 'project/network_topology/iframe.html'
class NetworkView(n_views.IndexView):
table_class = networks_tables.NetworksTable
template_name = 'project/network_topology/iframe.html'
class RouterDetailView(r_views.DetailView):
table_classes = (ports_tables.PortsTable, )
template_name = 'project/network_topology/iframe.html'
def get_interfaces_data(self):
pass
class NetworkDetailView(n_views.DetailView):
table_classes = (subnets_tables.SubnetsTable, )
template_name = 'project/network_topology/iframe.html'
class NetworkTopologyView(tabs.TabView):
tab_group_class = topology_tabs.TopologyTabs
template_name = 'project/network_topology/index.html'
page_title = _("Network Topology")
def get_context_data(self, **kwargs):
context = super(NetworkTopologyView, self).get_context_data(**kwargs)
return utils.get_context(self.request, context)
class JSONView(View):
trans = TranslationHelper()
@property
def is_router_enabled(self):
network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
return network_config.get('enable_router', True)
def add_resource_url(self, view, resources):
tenant_id = self.request.user.tenant_id
for resource in resources:
if (resource.get('tenant_id')
and tenant_id != resource.get('tenant_id')):
continue
resource['url'] = reverse(view, None, [str(resource['id'])])
def _check_router_external_port(self, ports, router_id, network_id):
for port in ports:
if (port['network_id'] == network_id
and port['device_id'] == router_id):
return True
return False
def _get_servers(self, request):
# Get nova data
try:
servers, more = api.nova.server_list(request)
except Exception:
servers = []
data = []
console_type = getattr(settings, 'CONSOLE_TYPE', 'AUTO')
# lowercase of the keys will be used at the end of the console URL.
for server in servers:
server_data = {'name': server.name,
'status': self.trans.instance[server.status],
'original_status': server.status,
'task': getattr(server, 'OS-EXT-STS:task_state'),
'id': server.id}
            # Avoid extra console calls if the server is in an invalid
            # status for a console connection
if server.status.lower() not in console_invalid_status:
try:
console = i_console.get_console(
request, console_type, server)[0].lower()
server_data['console'] = console
except exceptions.NotAvailable:
pass
data.append(server_data)
self.add_resource_url('horizon:project:instances:detail', data)
return data
def _get_networks(self, request):
        # Get neutron data.
        # Without a tenant_id, all networks are shown (as for an admin user),
        # so the tenant's networks must be requested explicitly. No tenant_id
        # is needed for subnets, though: subnets belonging to the public
        # network are required to draw subnet information on that network.
try:
neutron_networks = api.neutron.network_list_for_tenant(
request,
request.user.tenant_id)
except Exception:
neutron_networks = []
networks = []
for network in neutron_networks:
obj = {'name': network.name_or_id,
'id': network.id,
'subnets': [{'id': subnet.id,
'cidr': subnet.cidr}
for subnet in network.subnets],
'status': self.trans.network[network.status],
'original_status': network.status,
'router:external': network['router:external']}
self.add_resource_url('horizon:project:networks:subnets:detail',
obj['subnets'])
networks.append(obj)
# Add public networks to the networks list
if self.is_router_enabled:
try:
neutron_public_networks = api.neutron.network_list(
request,
**{'router:external': True})
except Exception:
neutron_public_networks = []
my_network_ids = [net['id'] for net in networks]
for publicnet in neutron_public_networks:
if publicnet.id in my_network_ids:
continue
try:
subnets = [{'id': subnet.id,
'cidr': subnet.cidr}
for subnet in publicnet.subnets]
self.add_resource_url(
'horizon:project:networks:subnets:detail', subnets)
except Exception:
subnets = []
networks.append({
'name': publicnet.name_or_id,
'id': publicnet.id,
'subnets': subnets,
'status': self.trans.network[publicnet.status],
'original_status': publicnet.status,
'router:external': publicnet['router:external']})
self.add_resource_url('horizon:project:networks:detail',
networks)
return sorted(networks,
key=lambda x: x.get('router:external'),
reverse=True)
def _get_routers(self, request):
if not self.is_router_enabled:
return []
try:
neutron_routers = api.neutron.router_list(
request,
tenant_id=request.user.tenant_id)
except Exception:
neutron_routers = []
routers = [{'id': router.id,
'name': router.name_or_id,
'status': self.trans.router[router.status],
'original_status': router.status,
'external_gateway_info': router.external_gateway_info}
for router in neutron_routers]
self.add_resource_url('horizon:project:routers:detail', routers)
return routers
def _get_ports(self, request, networks):
try:
neutron_ports = api.neutron.port_list(request)
except Exception:
neutron_ports = []
        # Filter out ports connected to networks outside the tenant,
        # which the tenant has no visibility into
tenant_network_ids = [network['id'] for network in networks]
ports = [{'id': port.id,
'network_id': port.network_id,
'device_id': port.device_id,
'fixed_ips': port.fixed_ips,
'device_owner': port.device_owner,
'status': self.trans.port[port.status],
'original_status': port.status}
for port in neutron_ports
if port.device_owner != 'network:router_ha_interface'
and port.network_id in tenant_network_ids]
self.add_resource_url('horizon:project:networks:ports:detail',
ports)
return ports
def _prepare_gateway_ports(self, routers, ports):
        # Users can't see ports on external networks, so add a fake
        # port based on the router information
for router in routers:
external_gateway_info = router.get('external_gateway_info')
if not external_gateway_info:
continue
external_network = external_gateway_info.get(
'network_id')
if not external_network:
continue
if self._check_router_external_port(ports,
router['id'],
external_network):
continue
fake_port = {'id': 'gateway%s' % external_network,
'network_id': external_network,
'device_id': router['id'],
'fixed_ips': []}
ports.append(fake_port)
def get(self, request, *args, **kwargs):
networks = self._get_networks(request)
data = {'servers': self._get_servers(request),
'networks': networks,
'ports': self._get_ports(request, networks),
'routers': self._get_routers(request)}
self._prepare_gateway_ports(data['routers'], data['ports'])
json_string = json.dumps(data, cls=LazyTranslationEncoder,
ensure_ascii=False)
        return HttpResponse(json_string, content_type='application/json')
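# Illustrative sketch (hypothetical values): the payload produced by
# JSONView.get has roughly this shape, which the topology JavaScript consumes:
#
#     {"servers": [{"name": "...", "status": "...", "id": "...", "url": "..."}],
#      "networks": [...],
#      "ports": [...],
#      "routers": [...]}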
|
from wallpaper_downloader import site_parser
def test_page_with_wallpapers_urls(
get_page_html_from_file,
get_wallpapers_urls_from_file,
get_wallpapers_names_from_file,
):
"""
Test '_find_wallpapers_urls' function of site_parser module.
Function is tested with the HTML where URLs of wallpapers are exist.
Args:
get_page_html_from_file (Fixture): fixture that return HTML.
get_wallpapers_urls_from_file (Fixture): fixture that return URLs of
wallpapers in format {'wallpaper_filename': 'wallpaper_url'}.
get_wallpapers_names_from_file (Fixture): fixture that return
names of wallpapers.
"""
page_html = get_page_html_from_file("page_with_wallpapers.html")
wallpapers_names = get_wallpapers_names_from_file
wallpapers_urls = site_parser._find_wallpapers_urls(
page_html,
wallpapers_names,
"1920x1080",
)
expected_wallpapers_urls = get_wallpapers_urls_from_file
assert wallpapers_urls == expected_wallpapers_urls
def test_page_without_wallpapers_urls(
get_page_html_from_file,
get_wallpapers_names_from_file,
):
"""
Test '_find_wallpapers_urls' function of site_parser module.
Function is tested with the HTML where URLs of wallpapers are not exist.
Args:
get_page_html_from_file (Fixture): fixture that return HTML.
get_wallpapers_names_from_file (Fixture): fixture that return
names of wallpapers.
"""
page_html = get_page_html_from_file("first_main_page.html")
wallpapers_names = get_wallpapers_names_from_file
wallpapers_urls = site_parser._find_wallpapers_urls(
page_html,
wallpapers_names,
"1920x1080",
)
assert wallpapers_urls == {}
|
from django.db import models
# Create your models here.
class Faq(models.Model):
question = models.CharField(max_length=1000)
answer = models.TextField(default='')
def __str__(self):
return self.question
class Qs(models.Model):
qs = models.CharField(max_length=1000)
def __str__(self):
return self.qs
|
#!/usr/bin/env python
# -*- test-case-name: twisted.names.test.test_examples -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Print the IP address for a given hostname, e.g.
    python gethostbyname.py www.google.com
This script does a host lookup using the default Twisted Names
resolver, a chained resolver, which attempts to lookup a name from:
* local hosts file
* memory cache of previous lookup results
* system recursive DNS servers
"""
import sys
from twisted.names import client, error
from twisted.internet.task import react
from twisted.python import usage
class Options(usage.Options):
synopsis = 'Usage: gethostbyname.py HOSTNAME'
def parseArgs(self, hostname):
self['hostname'] = hostname
def printResult(address, hostname):
"""
Print the IP address or an error message if an IP address was not
found.
"""
if address:
sys.stdout.write(address + '\n')
else:
        sys.stderr.write(
            'ERROR: No IP addresses found for name %r\n' % (hostname,))
def printError(failure, hostname):
"""
Print a friendly error message if the hostname could not be
resolved.
"""
failure.trap(error.DNSNameError)
sys.stderr.write('ERROR: hostname not found %r\n' % (hostname,))
def main(reactor, *argv):
options = Options()
try:
options.parseOptions(argv)
except usage.UsageError as errortext:
sys.stderr.write(str(options) + '\n')
sys.stderr.write('ERROR: %s\n' % (errortext,))
raise SystemExit(1)
hostname = options['hostname']
d = client.getHostByName(hostname)
d.addCallback(printResult, hostname)
d.addErrback(printError, hostname)
return d
if __name__ == '__main__':
react(main, sys.argv[1:])
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v7.resources.types import topic_view
from google.ads.googleads.v7.services.types import topic_view_service
from .base import TopicViewServiceTransport, DEFAULT_CLIENT_INFO
class TopicViewServiceGrpcTransport(TopicViewServiceTransport):
"""gRPC backend transport for TopicViewService.
Service to manage topic views.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(self, *,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning)
host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
)
@classmethod
def create_channel(cls,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
            host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs
)
def close(self):
self.grpc_channel.close()
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_topic_view(self) -> Callable[
[topic_view_service.GetTopicViewRequest],
topic_view.TopicView]:
r"""Return a callable for the get topic view method over gRPC.
Returns the requested topic view in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.GetTopicViewRequest],
~.TopicView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_topic_view' not in self._stubs:
self._stubs['get_topic_view'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v7.services.TopicViewService/GetTopicView',
request_serializer=topic_view_service.GetTopicViewRequest.serialize,
response_deserializer=topic_view.TopicView.deserialize,
)
return self._stubs['get_topic_view']
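# Illustrative sketch (hypothetical resource name): the transport is normally
# constructed for you by TopicViewServiceClient, but wiring it directly looks
# roughly like
#
#     transport = TopicViewServiceGrpcTransport()
#     request = topic_view_service.GetTopicViewRequest(
#         resource_name='customers/123/topicViews/456~789',
#     )
#     response = transport.get_topic_view(request)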
__all__ = (
'TopicViewServiceGrpcTransport',
)
|
import pytest
@pytest.mark.parametrize("idx", range(5))
def test_initial_approval_is_zero(gauge_v3_1, accounts, idx):
assert gauge_v3_1.allowance(accounts[0], accounts[idx]) == 0
def test_approve(gauge_v3_1, accounts):
gauge_v3_1.approve(accounts[1], 10 ** 19, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[0], accounts[1]) == 10 ** 19
def test_modify_approve(gauge_v3_1, accounts):
gauge_v3_1.approve(accounts[1], 10 ** 19, {"from": accounts[0]})
gauge_v3_1.approve(accounts[1], 12345678, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[0], accounts[1]) == 12345678
def test_revoke_approve(gauge_v3_1, accounts):
gauge_v3_1.approve(accounts[1], 10 ** 19, {"from": accounts[0]})
gauge_v3_1.approve(accounts[1], 0, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[0], accounts[1]) == 0
def test_approve_self(gauge_v3_1, accounts):
gauge_v3_1.approve(accounts[0], 10 ** 19, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[0], accounts[0]) == 10 ** 19
def test_only_affects_target(gauge_v3_1, accounts):
gauge_v3_1.approve(accounts[1], 10 ** 19, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[1], accounts[0]) == 0
def test_returns_true(gauge_v3_1, accounts):
tx = gauge_v3_1.approve(accounts[1], 10 ** 19, {"from": accounts[0]})
assert tx.return_value is True
def test_approval_event_fires(accounts, gauge_v3_1):
tx = gauge_v3_1.approve(accounts[1], 10 ** 19, {"from": accounts[0]})
assert len(tx.events) == 1
assert tx.events["Approval"].values() == [accounts[0], accounts[1], 10 ** 19]
def test_increase_allowance(accounts, gauge_v3_1):
gauge_v3_1.approve(accounts[1], 100, {"from": accounts[0]})
gauge_v3_1.increaseAllowance(accounts[1], 403, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[0], accounts[1]) == 503
def test_decrease_allowance(accounts, gauge_v3_1):
gauge_v3_1.approve(accounts[1], 100, {"from": accounts[0]})
gauge_v3_1.decreaseAllowance(accounts[1], 34, {"from": accounts[0]})
assert gauge_v3_1.allowance(accounts[0], accounts[1]) == 66
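# Note (illustrative): `tx.return_value` and `tx.events` above are attributes
# of Brownie's TransactionReceipt; `gauge_v3_1` and `accounts` are assumed to
# be fixtures provided by the project's conftest.py.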
|
"""Discover and run std-library "unittest" style tests."""
import sys
import traceback
import types
from typing import Any
from typing import Callable
from typing import Generator
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import Union
import _pytest._code
import pytest
from _pytest.compat import getimfunc
from _pytest.compat import is_async_function
from _pytest.config import hookimpl
from _pytest.fixtures import FixtureRequest
from _pytest.nodes import Collector
from _pytest.nodes import Item
from _pytest.outcomes import exit
from _pytest.outcomes import fail
from _pytest.outcomes import skip
from _pytest.outcomes import xfail
from _pytest.python import Class
from _pytest.python import Function
from _pytest.python import PyCollector
from _pytest.runner import CallInfo
from _pytest.scope import Scope
if TYPE_CHECKING:
import unittest
import twisted.trial.unittest
_SysExcInfoType = Union[
Tuple[Type[BaseException], BaseException, types.TracebackType],
Tuple[None, None, None],
]
def pytest_pycollect_makeitem(
collector: PyCollector, name: str, obj: object
) -> Optional["UnitTestCase"]:
# Has unittest been imported and is obj a subclass of its TestCase?
try:
ut = sys.modules["unittest"]
# Type ignored because `ut` is an opaque module.
if not issubclass(obj, ut.TestCase): # type: ignore
return None
except Exception:
return None
# Yes, so let's collect it.
item: UnitTestCase = UnitTestCase.from_parent(collector, name=name, obj=obj)
return item
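# Illustrative sketch: given a module containing
#
#     import unittest
#     class TestThing(unittest.TestCase):
#         def test_it(self):
#             self.assertTrue(True)
#
# the hook above returns a UnitTestCase collector for TestThing, while objects
# that are not unittest.TestCase subclasses fall through (return None).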
class UnitTestCase(Class):
    # Marker for fixturemanager.getfixtureinfo()
# to declare that our children do not support funcargs.
nofuncargs = True
def collect(self) -> Iterable[Union[Item, Collector]]:
from unittest import TestLoader
cls = self.obj
if not getattr(cls, "__test__", True):
return
skipped = _is_skipped(cls)
if not skipped:
self._inject_setup_teardown_fixtures(cls)
self._inject_setup_class_fixture()
self.session._fixturemanager.parsefactories(self, unittest=True)
loader = TestLoader()
foundsomething = False
for name in loader.getTestCaseNames(self.obj):
x = getattr(self.obj, name)
if not getattr(x, "__test__", True):
continue
funcobj = getimfunc(x)
yield TestCaseFunction.from_parent(self, name=name, callobj=funcobj)
foundsomething = True
if not foundsomething:
runtest = getattr(self.obj, "runTest", None)
if runtest is not None:
ut = sys.modules.get("twisted.trial.unittest", None)
# Type ignored because `ut` is an opaque module.
if ut is None or runtest != ut.TestCase.runTest: # type: ignore
yield TestCaseFunction.from_parent(self, name="runTest")
def _inject_setup_teardown_fixtures(self, cls: type) -> None:
"""Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding
teardown functions (#517)."""
class_fixture = _make_xunit_fixture(
cls,
"setUpClass",
"tearDownClass",
"doClassCleanups",
scope=Scope.Class,
pass_self=False,
)
if class_fixture:
cls.__pytest_class_setup = class_fixture # type: ignore[attr-defined]
method_fixture = _make_xunit_fixture(
cls,
"setup_method",
"teardown_method",
None,
scope=Scope.Function,
pass_self=True,
)
if method_fixture:
cls.__pytest_method_setup = method_fixture # type: ignore[attr-defined]
def _make_xunit_fixture(
obj: type,
setup_name: str,
teardown_name: str,
cleanup_name: Optional[str],
scope: Scope,
pass_self: bool,
):
setup = getattr(obj, setup_name, None)
teardown = getattr(obj, teardown_name, None)
if setup is None and teardown is None:
return None
if cleanup_name:
cleanup = getattr(obj, cleanup_name, lambda *args: None)
else:
def cleanup(*args):
pass
@pytest.fixture(
scope=scope.value,
autouse=True,
# Use a unique name to speed up lookup.
name=f"_unittest_{setup_name}_fixture_{obj.__qualname__}",
)
def fixture(self, request: FixtureRequest) -> Generator[None, None, None]:
if _is_skipped(self):
reason = self.__unittest_skip_why__
raise pytest.skip.Exception(reason, _use_item_location=True)
if setup is not None:
try:
if pass_self:
setup(self, request.function)
else:
setup()
# unittest does not call the cleanup function for every BaseException, so we
# follow this here.
except Exception:
if pass_self:
cleanup(self)
else:
cleanup()
raise
yield
try:
if teardown is not None:
if pass_self:
teardown(self, request.function)
else:
teardown()
finally:
if pass_self:
cleanup(self)
else:
cleanup()
return fixture
class TestCaseFunction(Function):
nofuncargs = True
_excinfo: Optional[List[_pytest._code.ExceptionInfo[BaseException]]] = None
_testcase: Optional["unittest.TestCase"] = None
def _getobj(self):
assert self.parent is not None
# Unlike a regular Function in a Class, where `item.obj` returns
# a *bound* method (attached to an instance), TestCaseFunction's
# `obj` returns an *unbound* method (not attached to an instance).
# This inconsistency is probably not desirable, but needs some
# consideration before changing.
return getattr(self.parent.obj, self.originalname) # type: ignore[attr-defined]
def setup(self) -> None:
# A bound method to be called during teardown() if set (see 'runtest()').
self._explicit_tearDown: Optional[Callable[[], None]] = None
assert self.parent is not None
self._testcase = self.parent.obj(self.name) # type: ignore[attr-defined]
self._obj = getattr(self._testcase, self.name)
if hasattr(self, "_request"):
self._request._fillfixtures()
def teardown(self) -> None:
if self._explicit_tearDown is not None:
self._explicit_tearDown()
self._explicit_tearDown = None
self._testcase = None
self._obj = None
def startTest(self, testcase: "unittest.TestCase") -> None:
pass
def _addexcinfo(self, rawexcinfo: "_SysExcInfoType") -> None:
# Unwrap potential exception info (see twisted trial support below).
rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
try:
excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info(rawexcinfo) # type: ignore[arg-type]
            # Invoke the attributes to trigger storing the traceback;
            # trial causes some issues there.
excinfo.value
excinfo.traceback
except TypeError:
try:
try:
values = traceback.format_exception(*rawexcinfo)
values.insert(
0,
"NOTE: Incompatible Exception Representation, "
"displaying natively:\n\n",
)
fail("".join(values), pytrace=False)
except (fail.Exception, KeyboardInterrupt):
raise
except BaseException:
fail(
"ERROR: Unknown Incompatible Exception "
"representation:\n%r" % (rawexcinfo,),
pytrace=False,
)
except KeyboardInterrupt:
raise
except fail.Exception:
excinfo = _pytest._code.ExceptionInfo.from_current()
self.__dict__.setdefault("_excinfo", []).append(excinfo)
def addError(
self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
) -> None:
try:
if isinstance(rawexcinfo[1], exit.Exception):
exit(rawexcinfo[1].msg)
except TypeError:
pass
self._addexcinfo(rawexcinfo)
def addFailure(
self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
) -> None:
self._addexcinfo(rawexcinfo)
def addSkip(self, testcase: "unittest.TestCase", reason: str) -> None:
try:
raise pytest.skip.Exception(reason, _use_item_location=True)
except skip.Exception:
self._addexcinfo(sys.exc_info())
def addExpectedFailure(
self,
testcase: "unittest.TestCase",
rawexcinfo: "_SysExcInfoType",
reason: str = "",
) -> None:
try:
xfail(str(reason))
except xfail.Exception:
self._addexcinfo(sys.exc_info())
def addUnexpectedSuccess(
self,
testcase: "unittest.TestCase",
reason: Optional["twisted.trial.unittest.Todo"] = None,
) -> None:
msg = "Unexpected success"
if reason:
msg += f": {reason.reason}"
# Preserve unittest behaviour - fail the test. Explicitly not an XPASS.
try:
fail(msg, pytrace=False)
except fail.Exception:
self._addexcinfo(sys.exc_info())
def addSuccess(self, testcase: "unittest.TestCase") -> None:
pass
def stopTest(self, testcase: "unittest.TestCase") -> None:
pass
def runtest(self) -> None:
from _pytest.debugging import maybe_wrap_pytest_function_for_tracing
assert self._testcase is not None
maybe_wrap_pytest_function_for_tracing(self)
# Let the unittest framework handle async functions.
if is_async_function(self.obj):
# Type ignored because self acts as the TestResult, but is not actually one.
self._testcase(result=self) # type: ignore[arg-type]
else:
# When --pdb is given, we want to postpone calling tearDown() otherwise
# when entering the pdb prompt, tearDown() would have probably cleaned up
# instance variables, which makes it difficult to debug.
# Arguably we could always postpone tearDown(), but this changes the moment where the
# TestCase instance interacts with the results object, so better to only do it
# when absolutely needed.
if self.config.getoption("usepdb") and not _is_skipped(self.obj):
self._explicit_tearDown = self._testcase.tearDown
setattr(self._testcase, "tearDown", lambda *args: None)
# We need to update the actual bound method with self.obj, because
# wrap_pytest_function_for_tracing replaces self.obj by a wrapper.
setattr(self._testcase, self.name, self.obj)
try:
self._testcase(result=self) # type: ignore[arg-type]
finally:
delattr(self._testcase, self.name)
def _prunetraceback(
self, excinfo: _pytest._code.ExceptionInfo[BaseException]
) -> None:
super()._prunetraceback(excinfo)
traceback = excinfo.traceback.filter(
lambda x: not x.frame.f_globals.get("__unittest")
)
if traceback:
excinfo.traceback = traceback
@hookimpl(tryfirst=True)
def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None:
if isinstance(item, TestCaseFunction):
if item._excinfo:
call.excinfo = item._excinfo.pop(0)
try:
del call.result
except AttributeError:
pass
# Convert unittest.SkipTest to pytest.skip.
# This is actually only needed for nose, which reuses unittest.SkipTest for
# its own nose.SkipTest. For unittest TestCases, SkipTest is already
# handled internally, and doesn't reach here.
unittest = sys.modules.get("unittest")
if (
unittest
and call.excinfo
and isinstance(call.excinfo.value, unittest.SkipTest) # type: ignore[attr-defined]
):
excinfo = call.excinfo
call2 = CallInfo[None].from_call(
lambda: pytest.skip(str(excinfo.value)), call.when
)
call.excinfo = call2.excinfo
# Twisted trial support.
@hookimpl(hookwrapper=True)
def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules:
ut: Any = sys.modules["twisted.python.failure"]
Failure__init__ = ut.Failure.__init__
check_testcase_implements_trial_reporter()
def excstore(
self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None
):
if exc_value is None:
self._rawexcinfo = sys.exc_info()
else:
if exc_type is None:
exc_type = type(exc_value)
self._rawexcinfo = (exc_type, exc_value, exc_tb)
try:
Failure__init__(
self, exc_value, exc_type, exc_tb, captureVars=captureVars
)
except TypeError:
Failure__init__(self, exc_value, exc_type, exc_tb)
ut.Failure.__init__ = excstore
yield
ut.Failure.__init__ = Failure__init__
else:
yield
def check_testcase_implements_trial_reporter(done: List[int] = []) -> None:
if done:
return
from zope.interface import classImplements
from twisted.trial.itrial import IReporter
classImplements(TestCaseFunction, IReporter)
done.append(1)
def _is_skipped(obj) -> bool:
"""Return True if the given object has been marked with @unittest.skip."""
return bool(getattr(obj, "__unittest_skip__", False))
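# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of the plugin): the kind of plain
# unittest.TestCase the collector above picks up. UnitTestCase.collect() finds
# the test method via unittest.TestLoader.getTestCaseNames(), and
# _inject_setup_teardown_fixtures() wraps setUpClass/tearDownClass in a hidden
# autouse fixture named "_unittest_setUpClass_fixture_<qualname>".
# ---------------------------------------------------------------------------
#
#   import unittest
#
#   class ExampleCase(unittest.TestCase):
#       @classmethod
#       def setUpClass(cls):
#           cls.resource = object()   # created once per class by the fixture
#
#       def test_resource_exists(self):
#           self.assertIsNotNone(self.resource)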
|
# -*- coding: utf-8 -*-
from gen_stats import Checkin, load_checkins
import bisect
from PIL import Image
import urllib
import cStringIO
WIDTH = 5616
HEIGHT = 3744
SIZES = {
'photo_img_sm': 100,
'photo_img_md': 320,
'photo_img_lg': 640
}
def get_rowcol(img_size):
cols = WIDTH / float(img_size)
rows = HEIGHT / float(img_size)
return (rows, cols)
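# Worked example (illustrative): for the 5616x3744 canvas above, a tile size
# of 144 px divides both dimensions exactly: 5616 / 144 = 39 columns and
# 3744 / 144 = 26 rows, so get_rowcol(144) == (26.0, 39.0) -> 1014 tiles.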
def find_ut_size(req_size):
    # SIZES is a dict, so sort its values before bisecting (dict ordering is
    # arbitrary here) and clamp the index so the largest size is returned when
    # the request is at least as big as every available size.
    sizes = sorted(SIZES.values())
    idx = bisect.bisect(sizes, req_size)
    return sizes[min(idx, len(sizes) - 1)]
def get_key(mlist, svalue):
for key, value in mlist.items():
if svalue == value:
return key
def get_image(checkin, size):
filename_size = get_key(SIZES, find_ut_size(size))
filename = checkin.photos(filename_size)[0]
image = Image.open(cStringIO.StringIO(urllib.urlopen(filename).read()))
image = image.resize((size, size), Image.ANTIALIAS)
return image
diff = 1000000
closest_size = None
if __name__ == '__main__':
checkins = [Checkin(c) for c in load_checkins('run3.json')]
checkins += [Checkin(c) for c in load_checkins('2015.json')]
with_imgs = [c for c in checkins if c.has_media()]
num_images = len(with_imgs)
image_size = None
for size in range(1, 640):
rows, cols = get_rowcol(size)
num = rows * cols
new_diff = num_images - num
if new_diff > 0 and new_diff < diff:
diff = new_diff
closest_size = size
if num == num_images:
image_size = size
break
if not image_size:
image_size = closest_size
rows, cols = get_rowcol(image_size)
images = [get_image(c, image_size) for c in with_imgs]
i = 0
background = Image.new('RGBA', (WIDTH, HEIGHT), (0, 0, 0, 255))
for col in range(0, int(cols)):
for row in range(0, int(rows)):
image = images[i]
background.paste(image, (image_size * col, image_size * row))
i += 1
background.show()
background.save("out2.jpg", "JPEG", quality=100, optimize=True)
|
import os
import logging
import uuid
from datetime import datetime, timedelta
from typing import List, Tuple, Optional
import pytz
from fastapi import Depends, APIRouter, status
from okdata.resource_auth import ResourceAuthorizer
from models import (
CreateWebhookTokenBody,
WebhookTokenAuthResponse,
WebhookTokenItem,
WebhookTokenOperation,
)
from resources.authorizer import AuthInfo
from resources.errors import ErrorResponse, error_message_models
from webhook_tokens import WebhookTokensTable
router = APIRouter()
logger = logging.getLogger()
logger.setLevel(os.environ.get("LOG_LEVEL", logging.INFO))
def webhook_tokens_table():
return WebhookTokensTable()
def resource_authorizer():
return ResourceAuthorizer()
@router.post(
"/{dataset_id}/tokens",
status_code=status.HTTP_201_CREATED,
responses=error_message_models(
status.HTTP_400_BAD_REQUEST,
status.HTTP_500_INTERNAL_SERVER_ERROR,
status.HTTP_403_FORBIDDEN,
),
)
def create_webhook_token(
dataset_id: str,
body: CreateWebhookTokenBody,
auth_info: AuthInfo = Depends(),
resource_authorizer=Depends(resource_authorizer),
webhook_tokens_table=Depends(webhook_tokens_table),
) -> WebhookTokenItem:
if not resource_authorizer.has_access(
auth_info.bearer_token, "okdata:dataset:admin", f"okdata:dataset:{dataset_id}"
):
raise ErrorResponse(status.HTTP_403_FORBIDDEN, "Forbidden")
# Create token
token_created = datetime.utcnow().replace(tzinfo=pytz.utc)
token_expires = token_created + timedelta(days=(365 * 2))
webhook_token_item = WebhookTokenItem(
token=uuid.uuid4(),
created_by=auth_info.principal_id,
dataset_id=dataset_id,
operation=body.operation,
created_at=token_created,
expires_at=token_expires,
)
webhook_tokens_table.put_webhook_token_item(webhook_token_item)
return webhook_token_item
@router.get(
"/{dataset_id}/tokens",
status_code=status.HTTP_200_OK,
responses=error_message_models(
status.HTTP_500_INTERNAL_SERVER_ERROR,
status.HTTP_403_FORBIDDEN,
),
)
def list_webhook_tokens(
dataset_id: str,
auth_info: AuthInfo = Depends(),
resource_authorizer=Depends(resource_authorizer),
webhook_tokens_table=Depends(webhook_tokens_table),
) -> List[WebhookTokenItem]:
is_admin = resource_authorizer.has_access(
auth_info.bearer_token, "okdata:dataset:admin", f"okdata:dataset:{dataset_id}"
)
if not is_admin:
raise ErrorResponse(status.HTTP_403_FORBIDDEN, "Forbidden")
return webhook_tokens_table.list_webhook_token_items(dataset_id)
@router.delete(
"/{dataset_id}/tokens/{webhook_token}",
status_code=status.HTTP_200_OK,
responses=error_message_models(
status.HTTP_500_INTERNAL_SERVER_ERROR,
status.HTTP_403_FORBIDDEN,
status.HTTP_404_NOT_FOUND,
),
)
def delete(
dataset_id: str,
webhook_token: str,
auth_info: AuthInfo = Depends(),
resource_authorizer=Depends(resource_authorizer),
webhook_tokens_table=Depends(webhook_tokens_table),
) -> dict:
is_admin = resource_authorizer.has_access(
auth_info.bearer_token, "okdata:dataset:admin", f"okdata:dataset:{dataset_id}"
)
if not is_admin:
raise ErrorResponse(status.HTTP_403_FORBIDDEN, "Forbidden")
webhook_item: WebhookTokenItem = webhook_tokens_table.get_webhook_token_item(
webhook_token, dataset_id
)
if not webhook_item:
raise ErrorResponse(
status.HTTP_404_NOT_FOUND,
f"Provided token does not exist for dataset {dataset_id}",
)
webhook_tokens_table.delete_webhook_token_item(webhook_token, dataset_id)
return {"message": f"Deleted {webhook_token} for dataset {dataset_id}"}
@router.get(
"/{dataset_id}/tokens/{webhook_token}/authorize",
status_code=status.HTTP_200_OK,
responses=error_message_models(
status.HTTP_500_INTERNAL_SERVER_ERROR,
),
)
def authorize_webhook_token(
dataset_id: str,
webhook_token: str,
operation: WebhookTokenOperation,
auth_info: AuthInfo = Depends(),
webhook_tokens_table=Depends(webhook_tokens_table),
) -> WebhookTokenAuthResponse:
try:
webhook_token_item = webhook_tokens_table.get_webhook_token_item(
webhook_token, dataset_id
)
if not webhook_token_item:
            return WebhookTokenAuthResponse(
                access=False,
                reason=f"Provided token is not associated with dataset ID: {dataset_id}",
            )
has_access, reason = validate_webhook_token(webhook_token_item, operation)
return WebhookTokenAuthResponse(access=has_access, reason=reason)
except Exception as e:
logger.exception(e)
raise ErrorResponse(
status.HTTP_500_INTERNAL_SERVER_ERROR,
"Internal Server Error",
)
def validate_webhook_token(
webhook_token_item: WebhookTokenItem, operation: WebhookTokenOperation
) -> Tuple[bool, Optional[str]]:
if webhook_token_item.operation != operation:
return (
False,
f"Provided token does not have access to perform {operation.value} on {webhook_token_item.dataset_id}",
)
dt_now = datetime.utcnow().replace(tzinfo=pytz.utc)
token_expired = webhook_token_item.expires_at < dt_now
if token_expired:
return False, "Provided token is expired"
return True, None
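# Illustrative sketch only -- field names are taken from the usage above; the
# exact model and enum definitions live in `models` and may differ. An expired
# token fails validation even when the operation matches:
#
#   item = WebhookTokenItem(
#       token=uuid.uuid4(),
#       created_by="jane-doe",                    # hypothetical principal
#       dataset_id="my-dataset",                  # hypothetical dataset
#       operation=some_operation,                 # a WebhookTokenOperation member
#       created_at=datetime.utcnow().replace(tzinfo=pytz.utc) - timedelta(days=800),
#       expires_at=datetime.utcnow().replace(tzinfo=pytz.utc) - timedelta(days=70),
#   )
#   validate_webhook_token(item, some_operation)
#   # -> (False, "Provided token is expired")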
|
import json
import urllib.parse, socket, http.client
import os
import pickle
import ssl
from act.common import aCTConfig
class aCTPanda:
def __init__(self,logger, proxyfile):
self.conf = aCTConfig.aCTConfigAPP()
server = self.conf.get(['panda','server'])
u = urllib.parse.urlparse(server)
self.hostport = u.netloc
self.topdir = u.path
proxypath = proxyfile
self.log = logger
# timeout in seconds
self.timeout = int(self.conf.get(['panda','timeout']))
socket.setdefaulttimeout(self.timeout)
self.context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.context.load_cert_chain(proxypath, keyfile=proxypath)
self.context.verify_mode = ssl.CERT_REQUIRED
self.context.load_verify_locations('/etc/pki/tls/certs/CERN-bundle.pem')
def __HTTPConnect__(self, mode, node):
urldata = None
try:
conn = http.client.HTTPSConnection(self.hostport, context=self.context)
rdata = urllib.parse.urlencode(node)
conn.request("POST", self.topdir+mode, rdata)
resp = conn.getresponse()
urldata = resp.read().decode()
conn.close()
except Exception as x:
self.log.error("error in connection: %s" %x)
return urldata
def getQueueStatus(self, queue=None):
node = {}
if queue:
node = {'site': queue}
self.log.debug('Getting queue info')
urldata = self.__HTTPConnect__('getJobStatisticsWithLabel', node)
if not urldata:
self.log.warning('No queue info returned by panda')
return None
try:
data = pickle.loads(urldata.encode())
        except Exception as e:
            self.log.error('Could not load panda response %s: %s' % (urldata, e))
return None
return data
def getJob(self,siteName,prodSourceLabel=None,getEventRanges=True):
node={}
node['siteName']=siteName
if prodSourceLabel is not None:
node['prodSourceLabel']=prodSourceLabel
pid = None
urldesc=None
eventranges=None
self.log.debug('Fetching jobs for %s %s' % ( siteName, prodSourceLabel) )
urldata=self.__HTTPConnect__('getJob',node)
if not urldata:
self.log.info('No job from panda')
return (None,None,None,None)
try:
urldesc = urllib.parse.parse_qs(urldata)
except Exception as x:
self.log.error(x)
return (None,None,None,None)
self.log.info('panda returned %s' % urldesc)
status = urldesc['StatusCode'][0]
if status == '20':
self.log.debug('No Panda activated jobs available')
return (-1,None,None,None)
elif status == '0':
pid = urldesc['PandaID'][0]
self.log.info('New Panda job with ID %s' % pid)
prodSourceLabel = urldesc['prodSourceLabel'][0]
if getEventRanges and 'eventService' in urldesc and urldesc['eventService'][0] == 'True':
node = {}
node['pandaID'] = urldesc['PandaID'][0]
node['jobsetID'] = urldesc['jobsetID'][0]
node['taskID'] = urldesc['taskID'][0]
node['nRanges'] = 500 # TODO: configurable?
if siteName == 'BOINC-ES':
node['nRanges'] = 100
eventranges = self.getEventRanges(node)
elif status == '60':
self.log.error('Failed to contact Panda, proxy may have expired')
else:
self.log.error('Check out what this Panda rc means %s' % status)
self.log.debug("%s %s" % (pid,urldesc))
return (pid,urldata,eventranges,prodSourceLabel)
def getEventRanges(self, node):
self.log.debug('%s: Fetching event ranges' % node['pandaID'])
urldata=self.__HTTPConnect__('getEventRanges', node)
if not urldata:
self.log.info('%s: Could not get event ranges from panda' % node['pandaID'])
return None
try:
            urldesc = urllib.parse.parse_qs(urldata)
except Exception as x:
self.log.error(x)
return None
self.log.debug('%s: Panda returned %s' % (node['pandaID'], urldesc))
status = urldesc['StatusCode'][0]
if status == '0':
return urldesc['eventRanges'][0]
if status == '60':
self.log.error('Failed to contact Panda, proxy may have expired')
else:
self.log.error('Check out what this Panda rc means %s' % status)
return None
def updateEventRange(self, node):
self.log.debug('Updating event range %s: %s' % (node['eventRangeID'], str(node)))
urldata=self.__HTTPConnect__('updateEventRange', node)
self.log.debug('panda returned %s' % str(urldata))
if not urldata:
self.log.info('Could not update event ranges in panda')
return None
try:
            urldesc = urllib.parse.parse_qs(urldata)
except Exception as x:
self.log.error(x)
return None
return urldesc
def updateEventRanges(self, node):
urldata=self.__HTTPConnect__('updateEventRanges', node)
self.log.debug('panda returned %s' % str(urldata))
if not urldata:
self.log.info('Could not update event ranges in panda')
return None
try:
            urldesc = urllib.parse.parse_qs(urldata)
except Exception as x:
self.log.error(x)
return None
return urldesc
def getStatus(self,pandaId):
self.log.info("entry %d" % pandaId)
node={}
node['ids']=pandaId
urldesc=None
urldata=self.__HTTPConnect__('getStatus',node)
try:
            urldesc = urllib.parse.parse_qs(urldata)
except Exception as x:
self.log.error(x)
return None
return urldesc
def updateStatus(self,pandaId,state,desc={}):
node={}
node['jobId']=pandaId
node['state']=state
node['schedulerID']=self.conf.get(['panda','schedulerid'])
if desc:
for key in desc.keys():
node[key]=desc[key]
# protection against bad pickles
if 'jobId' not in node or not node['jobId']:
node['jobId'] = pandaId
if 'state' not in node or not node['state']:
node['state'] = state
urldesc=None
urldata=self.__HTTPConnect__('updateJob',node)
#self.log.debug('panda returned %s' % str(urldata))
try:
            urldesc = urllib.parse.parse_qs(urldata)
except Exception as x:
self.log.error(x)
return None
return urldesc
def updateStatuses(self, jobs):
# Caller must make sure jobId and state are defined for each job
jobdata = []
for job in jobs:
node = job
node['schedulerID'] = self.conf.get(['panda','schedulerid'])
jobdata.append(node)
urldata=self.__HTTPConnect__('updateJobsInBulk', {'jobList': json.dumps(jobdata)})
try:
urldesc = json.loads(urldata)
except Exception as x:
self.log.error(x)
return {}
return urldesc
def queryJobInfo(self, cloud='ND'):
node={}
node['cloud']=cloud
node['schedulerID']=self.conf.get(['panda','schedulerid'])
try:
urldata=self.__HTTPConnect__('queryJobInfoPerCloud',node)
except:
return []
try:
            return pickle.loads(urldata.encode())
except:
return []
if __name__ == '__main__':
from act.common.aCTLogger import aCTLogger
logger = aCTLogger('test')
log = logger()
p = aCTPanda(log, os.environ['X509_USER_PROXY'])
print(p.getQueueStatus('UIO'))
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import unittest
import pbtest
import json
class SuperCookieTest(pbtest.PBSeleniumTest):
"""Make sure we detect potential supercookies. """
def has_supercookies(self, origin):
"""Check if the given origin has supercookies in PB's localStorage."""
self.load_url(pbtest.PB_CHROME_BG_URL, wait_on_site=1)
get_sc_domains_js = "return JSON.stringify(pb.storage."\
"getBadgerStorageObject('supercookie_domains').getItemClones())"
supercookieDomains = json.loads(self.js(get_sc_domains_js))
return origin in supercookieDomains
def test_should_detect_ls_of_third_party_frame(self):
"""We get some intermittent failures for this test.
It seems we sometimes miss the setting of localStorage items,
perhaps because the script runs before we start intercepting the calls.
Perhaps related to: https://github.com/ghostwords/chameleon/issues/5
"""
self.load_url("https://rawgit.com/gunesacar/24d81a5c964cb563614162c264be32f0/raw/8fa10f97b87343dfb62ae9b98b753c73a995157e/frame_ls.html", # noqa
wait_on_site=5)
        self.driver.switch_to_frame(
            self.driver.find_element_by_tag_name("iframe"))
print(self.js("return localStorage['frameId']"))
self.assertTrue(self.has_supercookies("githack.com"))
def test_should_not_detect_low_entropy_ls_of_third_party_frame(self):
self.load_url("https://rawgit.com/gunesacar/gunesacar/6f0c39fb728a218ccd91215bfefbd4e0/raw/f438eb4e5ce10dc8623a8834b1298fd4a846c6fa/low_entropy_localstorage_from_third_party_script.html", # noqa
wait_on_site=5)
self.assertFalse(self.has_supercookies("githack.com"))
def test_should_not_detect_first_party_ls(self):
self.load_url("https://gistcdn.githack.com/gunesacar/43e2ad2b76fa5a7f7c57/raw/44e7303338386514f1f5bb4166c8fd24a92e97fe/set_ls.html", # noqa
wait_on_site=5)
self.assertFalse(self.has_supercookies("githack.com"))
def test_should_not_detect_ls_of_third_party_script(self):
# a third-party script included by the top page (not a 3rd party frame)
self.load_url("https://rawgit.com/gunesacar/b366e3b03231dbee9709fe0a614faf10/raw/48e02456aa257e272092b398772a712391cf8b11/localstorage_from_third_party_script.html", # noqa
wait_on_site=5)
self.assertFalse(self.has_supercookies("githack.com"))
if __name__ == "__main__":
unittest.main()
|
from .rc522 import RC522
from py522.exceptions import NoReplyException, InvalidBCCException, ReaderException
import serial
import time
class RC522Uart(RC522):
BAUD_REG_VALUE = {
7200: 0xFA,
9600: 0xEB,
14400: 0xDA,
19200: 0xCB,
38400: 0xAB,
57600: 0x9A,
115200: 0x7A,
128000: 0x74,
230400: 0x5A,
460800: 0x3A,
921600: 0x1C,
1228800: 0x15
}
def __init__(self, port, speed=9600):
super().__init__()
self.port = serial.Serial(port, speed, timeout=1)
self.hard_reset_negated = False
def reset(self):
self.hard_reset()
self.soft_reset()
# On reset, baud rate gets reset as well
self.port.baudrate = 9600
def hard_reset(self):
self.port.dtr = not self.hard_reset_negated
time.sleep(0.01)
self.port.dtr = self.hard_reset_negated
time.sleep(0.01)
def change_baud_rate(self, new_speed):
reg_value = RC522Uart.BAUD_REG_VALUE.get(new_speed)
        if reg_value is None:
raise ReaderException('Unsupported baudrate %s' % str(new_speed))
self._regwrite(RC522.Reg.SerialSpeed, reg_value)
self.port.baudrate = new_speed
def _regread(self, reg):
return self._regreadbulk(reg)[0]
def _regreadbulk(self, reg, count=1):
        assert 0 <= reg <= 0x3F
req = bytes([reg | 0x80]) * count
if self.port.write(req) != len(req):
raise ReaderException('Could not send read request')
data = self.port.read(count)
if len(data) != count:
raise ReaderException('Could not read register value')
#print('%s -> %s' % (RC522.Reg.name(reg), data.hex()))
return data
def _regwrite(self, reg, value):
value = bytes([value])
return self._regwritebulk(reg, value)
def _regwritebulk(self, reg, data):
        assert 0 <= reg <= 0x3F
#print('%s <- %s' % (RC522.Reg.name(reg), data.hex()))
req = bytearray()
for b in data:
req.append(reg)
req.append(b)
if self.port.write(req) != len(req):
raise ReaderException('Could not send write request')
ack = self.port.read(len(data))
if ack != bytes([reg]) * len(data):
raise ReaderException('Incorrect write acknowledgement from PCD')
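# Illustrative usage sketch (the port path is an assumption): reset the PCD
# over DTR, then negotiate a faster UART link; change_baud_rate() writes the
# SerialSpeed register and then switches the host port to match.
#
#   reader = RC522Uart('/dev/ttyUSB0')   # hypothetical serial port
#   reader.reset()                       # PCD and host back at the default 9600 baud
#   reader.change_baud_rate(115200)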
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from __future__ import unicode_literals
from django.db import migrations
from pipeline.engine import states
def reverse_func(apps, schema_editor):
pass
def forward_func(apps, schema_editor):
PipelineInstance = apps.get_model("pipeline", "PipelineInstance")
Status = apps.get_model("engine", "Status")
revoked_status = Status.objects.filter(state=states.REVOKED).values("id", "archived_time")
id_to_time = {status["id"]: status["archived_time"] for status in revoked_status}
instances = PipelineInstance.objects.filter(instance_id__in=list(id_to_time.keys()))
for inst in instances:
inst.finish_time = id_to_time[inst.instance_id]
inst.is_revoked = True
inst.save()
class Migration(migrations.Migration):
dependencies = [
("pipeline", "0022_pipelineinstance_is_revoked"),
]
operations = [migrations.RunPython(forward_func, reverse_func)]
|
import torch
import torch.nn as nn
import numpy as np
import os
# ------------------------------------------------------------------------------
# Utility Methods
# ------------------------------------------------------------------------------
def flatten_sum(logps):
while len(logps.size()) > 1:
logps = logps.sum(dim=-1)
return logps
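# Example (illustrative): a per-dimension log-prob tensor of shape (B, C, H, W)
# collapses to one log-likelihood per batch element, shape (B,), by summing the
# trailing dimensions one at a time.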
# ------------------------------------------------------------------------------
# Logging
# ------------------------------------------------------------------------------
def save_session(model, optim, args, epoch):
path = os.path.join(args.save_dir, str(epoch))
if not os.path.exists(path):
os.makedirs(path)
# save the model and optimizer state
torch.save(model.state_dict(), os.path.join(path, 'model.pth'))
torch.save(optim.state_dict(), os.path.join(path, 'optim.pth'))
print('Successfully saved model')
def load_session(model, optim, args):
    start_epoch = 0  # fall back to a fresh run if the checkpoint cannot be read
    try:
        start_epoch = int(args.load_dir.split('/')[-1])
        model.load_state_dict(torch.load(os.path.join(args.load_dir, 'model.pth')))
        optim.load_state_dict(torch.load(os.path.join(args.load_dir, 'optim.pth')))
        print('Successfully loaded model')
    except Exception as e:
        print('Could not restore session properly: %s' % e)
    return model, optim, start_epoch
# ------------------------------------------------------------------------------
# Distributions
# ------------------------------------------------------------------------------
def standard_gaussian(shape):
mean, logsd = [torch.cuda.FloatTensor(shape).fill_(0.) for _ in range(2)]
return gaussian_diag(mean, logsd)
def gaussian_diag(mean, logsd):
    class o(object):
        Log2PI = float(np.log(2 * np.pi))
def logps(x):
return -0.5 * (o.Log2PI + 2. * logsd + ((x - mean) ** 2) / torch.exp(2. * logsd))
def sample():
eps = torch.cuda.FloatTensor(mean.size()).normal_()
return mean + torch.exp(logsd) * eps
o.logp = lambda x: flatten_sum(o.logps(x))
return o
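# Illustrative usage (requires CUDA, since the helpers above allocate
# torch.cuda.FloatTensor): draw a sample and score it under the same diagonal
# Gaussian; logp flattens the per-dimension logps down to one value per row.
#
#   dist = standard_gaussian((16, 8))
#   x = dist.sample()      # shape (16, 8)
#   ll = dist.logp(x)      # shape (16,)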
def laplace_diag(mu, log_b):
    '''
    log p(x; mu, b) = sum_i ( -|x_i - mu_i| / b - log(2b) )
    :param mu: mean
    :param log_b: log of the scale b (the variance is 2b^2)
    '''
class o(object):
def logps(x):
return -1 * (torch.abs((x-mu)/torch.exp(log_b)) +log_b + float(np.log(2)))
def sample():
eps = 0.0000001
unif = torch.clamp(torch.rand(size=mu.shape) - .5, min=-0.5+eps, max=0.5-eps).cuda()
samples = mu - torch.exp(log_b) * torch.sign(unif) * torch.log(1 - 2. * torch.abs(unif))
return samples
o.logp = lambda x: flatten_sum(o.logps(x))
return o
def cauchy_diag():
class o(object):
def logps(x):
return -1.*(float(np.log(np.pi)) + torch.log(1+x**2))
o.logp = lambda x: flatten_sum(o.logps(x))
return o
def indexes_to_one_hot(indexes, n_classes=None):
"""Converts a vector of indexes to a batch of one-hot vectors. """
indexes = indexes.type(torch.int64).view(-1, 1)
n_classes = n_classes if n_classes is not None else int(torch.max(indexes)) + 1
if indexes.is_cuda:
one_hots = torch.zeros(indexes.size()[0], n_classes).cuda().scatter_(1, indexes, 1)
else:
one_hots = torch.zeros(indexes.size()[0], n_classes).scatter_(1, indexes, 1)
#one_hots = one_hots.view(*indexes.shape, -1)
return one_hots
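# Example (illustrative):
#   indexes_to_one_hot(torch.tensor([0, 2, 1]), n_classes=3)
#   -> tensor([[1., 0., 0.],
#              [0., 0., 1.],
#              [0., 1., 0.]])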
def logmeanexp(inputs, dim=None, keepdim=False):
"""Numerically stable logsumexp.
Args:
inputs: A Variable with any shape.
dim: An integer.
keepdim: A boolean.
Returns:
Equivalent of log(sum(exp(inputs), dim=dim, keepdim=keepdim)).
"""
# For a 1-D array x (any array along a single dimension),
# log sum exp(x) = s + log sum exp(x - s)
# with s = max(x) being a common choice.
if dim is None:
inputs = inputs.view(-1)
dim = 0
s, _ = torch.max(inputs, dim=dim, keepdim=True)
outputs = s + (inputs - s).exp().mean(dim=dim, keepdim=True).log()
if not keepdim:
outputs = outputs.squeeze(dim)
return outputs
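# Sanity check (illustrative): log-mean-exp is log-sum-exp minus log(N) along
# the reduced dimension, so the two should agree up to numerical precision.
#
#   x = torch.randn(4, 10)
#   a = logmeanexp(x, dim=1)
#   b = torch.logsumexp(x, dim=1) - float(np.log(10))
#   assert torch.allclose(a, b)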
softmax = nn.Softmax(dim=1)
def compute_distributions(p_x_given_class, labels, n_classes, ycond=False):
p_x = logmeanexp(p_x_given_class,dim=1)
p_class_given_x = softmax(p_x_given_class)
if ycond:
labels_onehot = indexes_to_one_hot(labels.cpu(),n_classes).byte().cuda()
p_x_given_y = torch.masked_select(p_x_given_class, labels_onehot)
p_y_given_x = torch.masked_select(p_class_given_x, labels_onehot)
else:
assert(p_x_given_class.shape[1] == 1)
p_x_given_y = p_x_given_class[:,0]
p_y_given_x = torch.ones_like(p_x)
pred = p_x_given_class.argmax(dim=1)
if not ycond:
pred -= 1
correct = pred.eq(labels.view_as(pred)).float().mean()
return p_x, p_y_given_x, p_x_given_y, pred, correct
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
intensity_normalization.normalize.gmm
fit three gaussians to the histogram of
skull-stripped image and normalize the WM mean
to some standard value
Author: Blake Dewey (blake.dewey@jhu.edu),
Jacob Reinhold (jacob.reinhold@jhu.edu)
Created on: Apr 24, 2018
"""
from __future__ import print_function, division
import logging
import nibabel as nib
try:
from sklearn.mixture import GaussianMixture
except ImportError:
from sklearn.mixture import GMM as GaussianMixture
from intensity_normalization.utilities.mask import gmm_class_mask
logger = logging.getLogger(__name__)
def gmm_normalize(img, brain_mask=None, norm_value=1, contrast='t1', bg_mask=None, wm_peak=None):
"""
normalize the white matter of an image using a GMM to find the tissue classes
Args:
img (nibabel.nifti1.Nifti1Image): target MR image
brain_mask (nibabel.nifti1.Nifti1Image): brain mask for img
norm_value (float): value at which to place the WM mean
contrast (str): MR contrast type for img
bg_mask (nibabel.nifti1.Nifti1Image): if provided, use to zero bkgd
wm_peak (float): previously calculated WM peak
Returns:
normalized (nibabel.nifti1.Nifti1Image): gmm wm peak normalized image
"""
if wm_peak is None:
wm_peak = gmm_class_mask(img, brain_mask=brain_mask, contrast=contrast)
img_data = img.get_data()
logger.info('Normalizing Data...')
norm_data = (img_data/wm_peak)*norm_value
norm_data[norm_data < 0.1] = 0.0
if bg_mask is not None:
logger.info('Applying background mask...')
masked_image = norm_data * bg_mask.get_data()
else:
masked_image = norm_data
normalized = nib.Nifti1Image(masked_image, img.affine, img.header)
return normalized
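# Illustrative usage sketch (file paths are assumptions): normalize a T1 image
# so its GMM-estimated WM peak sits at norm_value, then save the result.
#
#   img = nib.load('t1.nii.gz')
#   mask = nib.load('brain_mask.nii.gz')
#   normalized = gmm_normalize(img, brain_mask=mask, norm_value=1, contrast='t1')
#   normalized.to_filename('t1_norm.nii.gz')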
|
import sgqlc.types
ebucore_schema = sgqlc.types.Schema()
########################################################################
# Scalars and Enumerations
########################################################################
Boolean = sgqlc.types.Boolean
String = sgqlc.types.String
########################################################################
# Input Objects
########################################################################
########################################################################
# Output Objects and Interfaces
########################################################################
class Agent(sgqlc.types.Interface):
__schema__ = ebucore_schema
__field_names__ = ('agent_name', 'has_role')
agent_name = sgqlc.types.Field(String, graphql_name='agentName')
has_role = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name='hasRole')
class Asset(sgqlc.types.Interface):
__schema__ = ebucore_schema
__field_names__ = ('asset_id', 'title', 'abstract', 'date', 'has_contributor')
asset_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='assetId')
title = sgqlc.types.Field(String, graphql_name='title')
abstract = sgqlc.types.Field(String, graphql_name='abstract')
date = sgqlc.types.Field(String, graphql_name='date')
has_contributor = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Agent))), graphql_name='hasContributor')
class BusinessObject(sgqlc.types.Interface):
__schema__ = ebucore_schema
__field_names__ = ('orientation', 'asset_id', 'title', 'abstract', 'date', 'has_contributor')
orientation = sgqlc.types.Field(String, graphql_name='orientation')
asset_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='assetId')
title = sgqlc.types.Field(String, graphql_name='title')
abstract = sgqlc.types.Field(String, graphql_name='abstract')
date = sgqlc.types.Field(String, graphql_name='date')
has_contributor = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Agent))), graphql_name='hasContributor')
class EditorialObject(sgqlc.types.Interface):
__schema__ = ebucore_schema
__field_names__ = ('approved_by', 'is_distributed_on', 'orientation', 'asset_id', 'title', 'abstract', 'date', 'has_contributor')
approved_by = sgqlc.types.Field(Agent, graphql_name='approvedBy')
is_distributed_on = sgqlc.types.Field(String, graphql_name='isDistributedOn')
orientation = sgqlc.types.Field(String, graphql_name='orientation')
asset_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='assetId')
title = sgqlc.types.Field(String, graphql_name='title')
abstract = sgqlc.types.Field(String, graphql_name='abstract')
date = sgqlc.types.Field(String, graphql_name='date')
has_contributor = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Agent))), graphql_name='hasContributor')
class Group(sgqlc.types.Interface):
__schema__ = ebucore_schema
__field_names__ = ('total_number_of_episodes', 'asset_id', 'title', 'abstract', 'date', 'has_contributor')
total_number_of_episodes = sgqlc.types.Field(String, graphql_name='totalNumberOfEpisodes')
asset_id = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='assetId')
title = sgqlc.types.Field(String, graphql_name='title')
abstract = sgqlc.types.Field(String, graphql_name='abstract')
date = sgqlc.types.Field(String, graphql_name='date')
has_contributor = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(Agent))), graphql_name='hasContributor')
class Person(sgqlc.types.Interface):
__schema__ = ebucore_schema
__field_names__ = ('agent_name', 'date_of_birth', 'has_role')
agent_name = sgqlc.types.Field(String, graphql_name='agentName')
date_of_birth = sgqlc.types.Field(String, graphql_name='dateOfBirth')
has_role = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name='hasRole')
class Query(sgqlc.types.Type):
__schema__ = ebucore_schema
__field_names__ = ('assets',)
assets = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(Asset)), graphql_name='assets', args=sgqlc.types.ArgDict((
('ids', sgqlc.types.Arg(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(String))), graphql_name='ids', default=None)),
))
)
class Character(sgqlc.types.Type, Person, Agent):
__schema__ = ebucore_schema
__field_names__ = ('given_name', 'family_name', 'character_name')
given_name = sgqlc.types.Field(String, graphql_name='givenName')
family_name = sgqlc.types.Field(String, graphql_name='familyName')
character_name = sgqlc.types.Field(String, graphql_name='characterName')
class Episode(sgqlc.types.Type, EditorialObject, BusinessObject, Asset):
__schema__ = ebucore_schema
__field_names__ = ('has_manifestation',)
has_manifestation = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('MediaResource'))), graphql_name='hasManifestation')
class MediaResource(sgqlc.types.Type, Asset):
__schema__ = ebucore_schema
__field_names__ = ('has_format', 'has_manifestation')
has_format = sgqlc.types.Field(String, graphql_name='hasFormat')
has_manifestation = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null('MediaResource'))), graphql_name='hasManifestation')
class Series(sgqlc.types.Type, Group, Asset):
__schema__ = ebucore_schema
__field_names__ = ('has_manifestation',)
has_manifestation = sgqlc.types.Field(sgqlc.types.non_null(sgqlc.types.list_of(sgqlc.types.non_null(MediaResource))), graphql_name='hasManifestation')
class Staff(sgqlc.types.Type, Person, Agent):
__schema__ = ebucore_schema
__field_names__ = ('given_name', 'family_name')
given_name = sgqlc.types.Field(String, graphql_name='givenName')
family_name = sgqlc.types.Field(String, graphql_name='familyName')
########################################################################
# Unions
########################################################################
########################################################################
# Schema Entry Points
########################################################################
ebucore_schema.query_type = Query
ebucore_schema.mutation_type = None
ebucore_schema.subscription_type = None
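# Illustrative sketch (hand-written, not generated): building a query against
# this schema with sgqlc's Operation helper; the id value is a placeholder.
#
#   from sgqlc.operation import Operation
#   op = Operation(ebucore_schema.query_type)
#   assets = op.assets(ids=['some-asset-id'])
#   assets.asset_id()
#   assets.title()
#   print(op)  # prints the GraphQL selection, roughly:
#              # query { assets(ids: ["some-asset-id"]) { assetId title } }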
|
import numpy as np
import scipy.sparse as sp
from scipy import linalg, optimize, sparse
from sklearn.datasets import load_iris, make_classification
from sklearn.metrics import log_loss
from sklearn.model_selection import StratifiedKFold
from sklearn.preprocessing import LabelEncoder
from sklearn.utils import compute_class_weight
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import raises
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model.logistic import (
LogisticRegression,
logistic_regression_path, LogisticRegressionCV,
_logistic_loss_and_grad, _logistic_grad_hess,
_multinomial_grad_hess, _logistic_loss,
)
X = [[-1, 0], [0, 1], [1, 1]]
X_sp = sp.csr_matrix(X)
Y1 = [0, 1, 1]
Y2 = [2, 1, 0]
iris = load_iris()
def check_predictions(clf, X, y):
"""Check that the model is able to fit the classification data"""
n_samples = len(y)
classes = np.unique(y)
n_classes = classes.shape[0]
predicted = clf.fit(X, y).predict(X)
assert_array_equal(clf.classes_, classes)
assert_equal(predicted.shape, (n_samples,))
assert_array_equal(predicted, y)
probabilities = clf.predict_proba(X)
assert_equal(probabilities.shape, (n_samples, n_classes))
assert_array_almost_equal(probabilities.sum(axis=1), np.ones(n_samples))
assert_array_equal(probabilities.argmax(axis=1), y)
def test_predict_2_classes():
# Simple sanity check on a 2 classes dataset
# Make sure it predicts the correct result on simple datasets.
check_predictions(LogisticRegression(random_state=0), X, Y1)
check_predictions(LogisticRegression(random_state=0), X_sp, Y1)
check_predictions(LogisticRegression(C=100, random_state=0), X, Y1)
check_predictions(LogisticRegression(C=100, random_state=0), X_sp, Y1)
check_predictions(LogisticRegression(fit_intercept=False,
random_state=0), X, Y1)
check_predictions(LogisticRegression(fit_intercept=False,
random_state=0), X_sp, Y1)
def test_error():
# Test for appropriate exception on errors
msg = "Penalty term must be positive"
assert_raise_message(ValueError, msg,
LogisticRegression(C=-1).fit, X, Y1)
assert_raise_message(ValueError, msg,
LogisticRegression(C="test").fit, X, Y1)
for LR in [LogisticRegression, LogisticRegressionCV]:
msg = "Tolerance for stopping criteria must be positive"
assert_raise_message(ValueError, msg, LR(tol=-1).fit, X, Y1)
assert_raise_message(ValueError, msg, LR(tol="test").fit, X, Y1)
msg = "Maximum number of iteration must be positive"
assert_raise_message(ValueError, msg, LR(max_iter=-1).fit, X, Y1)
assert_raise_message(ValueError, msg, LR(max_iter="test").fit, X, Y1)
def test_lr_liblinear_warning():
n_samples, n_features = iris.data.shape
target = iris.target_names[iris.target]
lr = LogisticRegression(solver='liblinear', n_jobs=2)
assert_warns_message(UserWarning,
"'n_jobs' > 1 does not have any effect when"
" 'solver' is set to 'liblinear'. Got 'n_jobs'"
" = 2.",
lr.fit, iris.data, target)
def test_predict_3_classes():
check_predictions(LogisticRegression(C=10), X, Y2)
check_predictions(LogisticRegression(C=10), X_sp, Y2)
def test_predict_iris():
# Test logistic regression with the iris dataset
n_samples, n_features = iris.data.shape
target = iris.target_names[iris.target]
# Test that both multinomial and OvR solvers handle
# multiclass data correctly and give good accuracy
# score (>0.95) for the training data.
for clf in [LogisticRegression(C=len(iris.data)),
LogisticRegression(C=len(iris.data), solver='lbfgs',
multi_class='multinomial'),
LogisticRegression(C=len(iris.data), solver='newton-cg',
multi_class='multinomial'),
LogisticRegression(C=len(iris.data), solver='sag', tol=1e-2,
multi_class='ovr', random_state=42),
LogisticRegression(C=len(iris.data), solver='saga', tol=1e-2,
multi_class='ovr', random_state=42)
]:
clf.fit(iris.data, target)
assert_array_equal(np.unique(target), clf.classes_)
pred = clf.predict(iris.data)
assert_greater(np.mean(pred == target), .95)
probabilities = clf.predict_proba(iris.data)
assert_array_almost_equal(probabilities.sum(axis=1),
np.ones(n_samples))
pred = iris.target_names[probabilities.argmax(axis=1)]
assert_greater(np.mean(pred == target), .95)
def test_multinomial_validation():
for solver in ['lbfgs', 'newton-cg', 'sag', 'saga']:
lr = LogisticRegression(C=-1, solver=solver, multi_class='multinomial')
assert_raises(ValueError, lr.fit, [[0, 1], [1, 0]], [0, 1])
def test_check_solver_option():
X, y = iris.data, iris.target
for LR in [LogisticRegression, LogisticRegressionCV]:
msg = ('Logistic Regression supports only liblinear, newton-cg, '
'lbfgs, sag and saga solvers, got wrong_name')
lr = LR(solver="wrong_name")
assert_raise_message(ValueError, msg, lr.fit, X, y)
msg = "multi_class should be either multinomial or ovr, got wrong_name"
lr = LR(solver='newton-cg', multi_class="wrong_name")
assert_raise_message(ValueError, msg, lr.fit, X, y)
# only 'liblinear' solver
msg = "Solver liblinear does not support a multinomial backend."
lr = LR(solver='liblinear', multi_class='multinomial')
assert_raise_message(ValueError, msg, lr.fit, X, y)
# all solvers except 'liblinear'
for solver in ['newton-cg', 'lbfgs', 'sag']:
msg = ("Solver %s supports only l2 penalties, got l1 penalty." %
solver)
lr = LR(solver=solver, penalty='l1')
assert_raise_message(ValueError, msg, lr.fit, X, y)
for solver in ['newton-cg', 'lbfgs', 'sag', 'saga']:
msg = ("Solver %s supports only dual=False, got dual=True" %
solver)
lr = LR(solver=solver, dual=True)
assert_raise_message(ValueError, msg, lr.fit, X, y)
def test_multinomial_binary():
# Test multinomial LR on a binary problem.
target = (iris.target > 0).astype(np.intp)
target = np.array(["setosa", "not-setosa"])[target]
for solver in ['lbfgs', 'newton-cg', 'sag', 'saga']:
clf = LogisticRegression(solver=solver, multi_class='multinomial',
random_state=42, max_iter=2000)
clf.fit(iris.data, target)
assert_equal(clf.coef_.shape, (1, iris.data.shape[1]))
assert_equal(clf.intercept_.shape, (1,))
assert_array_equal(clf.predict(iris.data), target)
mlr = LogisticRegression(solver=solver, multi_class='multinomial',
random_state=42, fit_intercept=False)
mlr.fit(iris.data, target)
pred = clf.classes_[np.argmax(clf.predict_log_proba(iris.data),
axis=1)]
assert_greater(np.mean(pred == target), .9)
def test_sparsify():
# Test sparsify and densify members.
n_samples, n_features = iris.data.shape
target = iris.target_names[iris.target]
clf = LogisticRegression(random_state=0).fit(iris.data, target)
pred_d_d = clf.decision_function(iris.data)
clf.sparsify()
assert_true(sp.issparse(clf.coef_))
pred_s_d = clf.decision_function(iris.data)
sp_data = sp.coo_matrix(iris.data)
pred_s_s = clf.decision_function(sp_data)
clf.densify()
pred_d_s = clf.decision_function(sp_data)
assert_array_almost_equal(pred_d_d, pred_s_d)
assert_array_almost_equal(pred_d_d, pred_s_s)
assert_array_almost_equal(pred_d_d, pred_d_s)
def test_inconsistent_input():
# Test that an exception is raised on inconsistent input
rng = np.random.RandomState(0)
X_ = rng.random_sample((5, 10))
y_ = np.ones(X_.shape[0])
y_[0] = 0
clf = LogisticRegression(random_state=0)
# Wrong dimensions for training data
y_wrong = y_[:-1]
assert_raises(ValueError, clf.fit, X, y_wrong)
# Wrong dimensions for test data
assert_raises(ValueError, clf.fit(X_, y_).predict,
rng.random_sample((3, 12)))
def test_write_parameters():
# Test that we can write to coef_ and intercept_
clf = LogisticRegression(random_state=0)
clf.fit(X, Y1)
clf.coef_[:] = 0
clf.intercept_[:] = 0
assert_array_almost_equal(clf.decision_function(X), 0)
@raises(ValueError)
def test_nan():
# Test proper NaN handling.
# Regression test for Issue #252: fit used to go into an infinite loop.
Xnan = np.array(X, dtype=np.float64)
Xnan[0, 1] = np.nan
LogisticRegression(random_state=0).fit(Xnan, Y1)
def test_consistency_path():
# Test that the path algorithm is consistent
rng = np.random.RandomState(0)
X = np.concatenate((rng.randn(100, 2) + [1, 1], rng.randn(100, 2)))
y = [1] * 100 + [-1] * 100
Cs = np.logspace(0, 4, 10)
f = ignore_warnings
# can't test with fit_intercept=True since LIBLINEAR
# penalizes the intercept
for solver in ['sag', 'saga']:
coefs, Cs, _ = f(logistic_regression_path)(
X, y, Cs=Cs, fit_intercept=False, tol=1e-5, solver=solver,
max_iter=1000,
random_state=0)
for i, C in enumerate(Cs):
lr = LogisticRegression(C=C, fit_intercept=False, tol=1e-5,
solver=solver,
random_state=0)
lr.fit(X, y)
lr_coef = lr.coef_.ravel()
assert_array_almost_equal(lr_coef, coefs[i], decimal=4,
err_msg="with solver = %s" % solver)
# test for fit_intercept=True
for solver in ('lbfgs', 'newton-cg', 'liblinear', 'sag', 'saga'):
Cs = [1e3]
coefs, Cs, _ = f(logistic_regression_path)(
X, y, Cs=Cs, fit_intercept=True, tol=1e-6, solver=solver,
intercept_scaling=10000., random_state=0)
lr = LogisticRegression(C=Cs[0], fit_intercept=True, tol=1e-4,
intercept_scaling=10000., random_state=0)
lr.fit(X, y)
lr_coef = np.concatenate([lr.coef_.ravel(), lr.intercept_])
assert_array_almost_equal(lr_coef, coefs[0], decimal=4,
err_msg="with solver = %s" % solver)
def test_liblinear_dual_random_state():
# random_state is relevant for liblinear solver only if dual=True
X, y = make_classification(n_samples=20, random_state=0)
lr1 = LogisticRegression(random_state=0, dual=True, max_iter=1, tol=1e-15)
lr1.fit(X, y)
lr2 = LogisticRegression(random_state=0, dual=True, max_iter=1, tol=1e-15)
lr2.fit(X, y)
lr3 = LogisticRegression(random_state=8, dual=True, max_iter=1, tol=1e-15)
lr3.fit(X, y)
# same result for same random state
assert_array_almost_equal(lr1.coef_, lr2.coef_)
# different results for different random states
msg = "Arrays are not almost equal to 6 decimals"
assert_raise_message(AssertionError, msg,
assert_array_almost_equal, lr1.coef_, lr3.coef_)
def test_logistic_loss_and_grad():
X_ref, y = make_classification(n_samples=20, random_state=0)
n_features = X_ref.shape[1]
X_sp = X_ref.copy()
X_sp[X_sp < .1] = 0
X_sp = sp.csr_matrix(X_sp)
for X in (X_ref, X_sp):
w = np.zeros(n_features)
# First check that our derivation of the grad is correct
loss, grad = _logistic_loss_and_grad(w, X, y, alpha=1.)
approx_grad = optimize.approx_fprime(
w, lambda w: _logistic_loss_and_grad(w, X, y, alpha=1.)[0], 1e-3
)
assert_array_almost_equal(grad, approx_grad, decimal=2)
# Second check that our intercept implementation is good
w = np.zeros(n_features + 1)
loss_interp, grad_interp = _logistic_loss_and_grad(
w, X, y, alpha=1.
)
assert_array_almost_equal(loss, loss_interp)
approx_grad = optimize.approx_fprime(
w, lambda w: _logistic_loss_and_grad(w, X, y, alpha=1.)[0], 1e-3
)
assert_array_almost_equal(grad_interp, approx_grad, decimal=2)
def test_logistic_grad_hess():
rng = np.random.RandomState(0)
n_samples, n_features = 50, 5
X_ref = rng.randn(n_samples, n_features)
y = np.sign(X_ref.dot(5 * rng.randn(n_features)))
X_ref -= X_ref.mean()
X_ref /= X_ref.std()
X_sp = X_ref.copy()
X_sp[X_sp < .1] = 0
X_sp = sp.csr_matrix(X_sp)
for X in (X_ref, X_sp):
w = .1 * np.ones(n_features)
# First check that _logistic_grad_hess is consistent
# with _logistic_loss_and_grad
loss, grad = _logistic_loss_and_grad(w, X, y, alpha=1.)
grad_2, hess = _logistic_grad_hess(w, X, y, alpha=1.)
assert_array_almost_equal(grad, grad_2)
# Now check our hessian along the second direction of the grad
vector = np.zeros_like(grad)
vector[1] = 1
hess_col = hess(vector)
# Computation of the Hessian is particularly fragile to numerical
# errors when doing simple finite differences. Here we compute the
# grad along a path in the direction of the vector and then use a
# least-square regression to estimate the slope
e = 1e-3
d_x = np.linspace(-e, e, 30)
d_grad = np.array([
_logistic_loss_and_grad(w + t * vector, X, y, alpha=1.)[1]
for t in d_x
])
d_grad -= d_grad.mean(axis=0)
approx_hess_col = linalg.lstsq(d_x[:, np.newaxis], d_grad)[0].ravel()
assert_array_almost_equal(approx_hess_col, hess_col, decimal=3)
# Second check that our intercept implementation is good
w = np.zeros(n_features + 1)
loss_interp, grad_interp = _logistic_loss_and_grad(w, X, y, alpha=1.)
loss_interp_2 = _logistic_loss(w, X, y, alpha=1.)
grad_interp_2, hess = _logistic_grad_hess(w, X, y, alpha=1.)
assert_array_almost_equal(loss_interp, loss_interp_2)
assert_array_almost_equal(grad_interp, grad_interp_2)
def test_logistic_cv():
# test for LogisticRegressionCV object
n_samples, n_features = 50, 5
rng = np.random.RandomState(0)
X_ref = rng.randn(n_samples, n_features)
y = np.sign(X_ref.dot(5 * rng.randn(n_features)))
X_ref -= X_ref.mean()
X_ref /= X_ref.std()
lr_cv = LogisticRegressionCV(Cs=[1.], fit_intercept=False,
solver='liblinear')
lr_cv.fit(X_ref, y)
lr = LogisticRegression(C=1., fit_intercept=False)
lr.fit(X_ref, y)
assert_array_almost_equal(lr.coef_, lr_cv.coef_)
assert_array_equal(lr_cv.coef_.shape, (1, n_features))
assert_array_equal(lr_cv.classes_, [-1, 1])
assert_equal(len(lr_cv.classes_), 2)
coefs_paths = np.asarray(list(lr_cv.coefs_paths_.values()))
assert_array_equal(coefs_paths.shape, (1, 3, 1, n_features))
assert_array_equal(lr_cv.Cs_.shape, (1,))
scores = np.asarray(list(lr_cv.scores_.values()))
assert_array_equal(scores.shape, (1, 3, 1))
def test_multinomial_logistic_regression_string_inputs():
# Test with string labels for LogisticRegression(CV)
n_samples, n_features, n_classes = 50, 5, 3
X_ref, y = make_classification(n_samples=n_samples, n_features=n_features,
n_classes=n_classes, n_informative=3,
random_state=0)
y_str = LabelEncoder().fit(['bar', 'baz', 'foo']).inverse_transform(y)
# For numerical labels, let y values be taken from set (-1, 0, 1)
y = np.array(y) - 1
# Test for string labels
lr = LogisticRegression(solver='lbfgs', multi_class='multinomial')
lr_cv = LogisticRegressionCV(solver='lbfgs', multi_class='multinomial')
lr_str = LogisticRegression(solver='lbfgs', multi_class='multinomial')
lr_cv_str = LogisticRegressionCV(solver='lbfgs', multi_class='multinomial')
lr.fit(X_ref, y)
lr_cv.fit(X_ref, y)
lr_str.fit(X_ref, y_str)
lr_cv_str.fit(X_ref, y_str)
assert_array_almost_equal(lr.coef_, lr_str.coef_)
assert_equal(sorted(lr_str.classes_), ['bar', 'baz', 'foo'])
assert_array_almost_equal(lr_cv.coef_, lr_cv_str.coef_)
assert_equal(sorted(lr_str.classes_), ['bar', 'baz', 'foo'])
assert_equal(sorted(lr_cv_str.classes_), ['bar', 'baz', 'foo'])
# The predictions should be in original labels
assert_equal(sorted(np.unique(lr_str.predict(X_ref))),
['bar', 'baz', 'foo'])
assert_equal(sorted(np.unique(lr_cv_str.predict(X_ref))),
['bar', 'baz', 'foo'])
# Make sure class weights can be given with string labels
lr_cv_str = LogisticRegression(
solver='lbfgs', class_weight={'bar': 1, 'baz': 2, 'foo': 0},
multi_class='multinomial').fit(X_ref, y_str)
assert_equal(sorted(np.unique(lr_cv_str.predict(X_ref))), ['bar', 'baz'])
def test_logistic_cv_sparse():
X, y = make_classification(n_samples=50, n_features=5,
random_state=0)
X[X < 1.0] = 0.0
csr = sp.csr_matrix(X)
clf = LogisticRegressionCV(fit_intercept=True)
clf.fit(X, y)
clfs = LogisticRegressionCV(fit_intercept=True)
clfs.fit(csr, y)
assert_array_almost_equal(clfs.coef_, clf.coef_)
assert_array_almost_equal(clfs.intercept_, clf.intercept_)
assert_equal(clfs.C_, clf.C_)
def test_intercept_logistic_helper():
n_samples, n_features = 10, 5
X, y = make_classification(n_samples=n_samples, n_features=n_features,
random_state=0)
# Fit intercept case.
alpha = 1.
w = np.ones(n_features + 1)
grad_interp, hess_interp = _logistic_grad_hess(w, X, y, alpha)
loss_interp = _logistic_loss(w, X, y, alpha)
# Do not fit intercept. This can be considered equivalent to adding
# a feature vector of ones, i.e column of one vectors.
X_ = np.hstack((X, np.ones(10)[:, np.newaxis]))
grad, hess = _logistic_grad_hess(w, X_, y, alpha)
loss = _logistic_loss(w, X_, y, alpha)
# In the fit_intercept=False case, the feature vector of ones is
# penalized. This should be taken care of.
assert_almost_equal(loss_interp + 0.5 * (w[-1] ** 2), loss)
# Check gradient.
assert_array_almost_equal(grad_interp[:n_features], grad[:n_features])
assert_almost_equal(grad_interp[-1] + alpha * w[-1], grad[-1])
rng = np.random.RandomState(0)
grad = rng.rand(n_features + 1)
hess_interp = hess_interp(grad)
hess = hess(grad)
assert_array_almost_equal(hess_interp[:n_features], hess[:n_features])
assert_almost_equal(hess_interp[-1] + alpha * grad[-1], hess[-1])
def test_ovr_multinomial_iris():
# Test that OvR and multinomial are correct using the iris dataset.
train, target = iris.data, iris.target
n_samples, n_features = train.shape
# The cv indices from stratified kfold (where stratification is done based
# on the fine-grained iris classes, i.e, before the classes 0 and 1 are
# conflated) is used for both clf and clf1
n_cv = 2
cv = StratifiedKFold(n_cv)
precomputed_folds = list(cv.split(train, target))
# Train clf on the original dataset where classes 0 and 1 are separated
clf = LogisticRegressionCV(cv=precomputed_folds)
clf.fit(train, target)
# Conflate classes 0 and 1 and train clf1 on this modified dataset
clf1 = LogisticRegressionCV(cv=precomputed_folds)
target_copy = target.copy()
target_copy[target_copy == 0] = 1
clf1.fit(train, target_copy)
# Ensure that what OvR learns for class2 is same regardless of whether
# classes 0 and 1 are separated or not
assert_array_almost_equal(clf.scores_[2], clf1.scores_[2])
assert_array_almost_equal(clf.intercept_[2:], clf1.intercept_)
assert_array_almost_equal(clf.coef_[2][np.newaxis, :], clf1.coef_)
# Test the shape of various attributes.
assert_equal(clf.coef_.shape, (3, n_features))
assert_array_equal(clf.classes_, [0, 1, 2])
coefs_paths = np.asarray(list(clf.coefs_paths_.values()))
assert_array_almost_equal(coefs_paths.shape, (3, n_cv, 10, n_features + 1))
assert_equal(clf.Cs_.shape, (10,))
scores = np.asarray(list(clf.scores_.values()))
assert_equal(scores.shape, (3, n_cv, 10))
# Test that for the iris data multinomial gives a better accuracy than OvR
for solver in ['lbfgs', 'newton-cg', 'sag', 'saga']:
max_iter = 2000 if solver in ['sag', 'saga'] else 15
clf_multi = LogisticRegressionCV(
solver=solver, multi_class='multinomial', max_iter=max_iter,
random_state=42, tol=1e-5 if solver in ['sag', 'saga'] else 1e-2,
cv=2)
clf_multi.fit(train, target)
multi_score = clf_multi.score(train, target)
ovr_score = clf.score(train, target)
assert_greater(multi_score, ovr_score)
# Test attributes of LogisticRegressionCV
assert_equal(clf.coef_.shape, clf_multi.coef_.shape)
assert_array_equal(clf_multi.classes_, [0, 1, 2])
coefs_paths = np.asarray(list(clf_multi.coefs_paths_.values()))
assert_array_almost_equal(coefs_paths.shape, (3, n_cv, 10,
n_features + 1))
assert_equal(clf_multi.Cs_.shape, (10,))
scores = np.asarray(list(clf_multi.scores_.values()))
assert_equal(scores.shape, (3, n_cv, 10))
def test_logistic_regression_solvers():
X, y = make_classification(n_features=10, n_informative=5, random_state=0)
ncg = LogisticRegression(solver='newton-cg', fit_intercept=False)
lbf = LogisticRegression(solver='lbfgs', fit_intercept=False)
lib = LogisticRegression(fit_intercept=False)
sag = LogisticRegression(solver='sag', fit_intercept=False,
random_state=42)
saga = LogisticRegression(solver='saga', fit_intercept=False,
random_state=42)
ncg.fit(X, y)
lbf.fit(X, y)
sag.fit(X, y)
saga.fit(X, y)
lib.fit(X, y)
assert_array_almost_equal(ncg.coef_, lib.coef_, decimal=3)
assert_array_almost_equal(lib.coef_, lbf.coef_, decimal=3)
assert_array_almost_equal(ncg.coef_, lbf.coef_, decimal=3)
assert_array_almost_equal(sag.coef_, lib.coef_, decimal=3)
assert_array_almost_equal(sag.coef_, ncg.coef_, decimal=3)
assert_array_almost_equal(sag.coef_, lbf.coef_, decimal=3)
assert_array_almost_equal(saga.coef_, sag.coef_, decimal=3)
assert_array_almost_equal(saga.coef_, lbf.coef_, decimal=3)
assert_array_almost_equal(saga.coef_, ncg.coef_, decimal=3)
assert_array_almost_equal(saga.coef_, lib.coef_, decimal=3)
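# --- Illustrative sketch (not part of the original test suite) ---
# The ten pairwise assertions above can be generated rather than spelled out;
# a hedged, equivalent formulation (at the cost of less specific failure
# output):
def _sketch_assert_all_pairs_close(estimators, decimal=3):
    import itertools
    for est_a, est_b in itertools.combinations(estimators, 2):
        assert_array_almost_equal(est_a.coef_, est_b.coef_, decimal=decimal)
# e.g. _sketch_assert_all_pairs_close([ncg, lbf, lib, sag, saga]) inside the
# test would cover the same comparisons.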
def test_logistic_regression_solvers_multiclass():
X, y = make_classification(n_samples=20, n_features=20, n_informative=10,
n_classes=3, random_state=0)
tol = 1e-7
ncg = LogisticRegression(solver='newton-cg', fit_intercept=False, tol=tol)
lbf = LogisticRegression(solver='lbfgs', fit_intercept=False, tol=tol)
lib = LogisticRegression(fit_intercept=False, tol=tol)
sag = LogisticRegression(solver='sag', fit_intercept=False, tol=tol,
max_iter=1000, random_state=42)
saga = LogisticRegression(solver='saga', fit_intercept=False, tol=tol,
max_iter=10000, random_state=42)
ncg.fit(X, y)
lbf.fit(X, y)
sag.fit(X, y)
saga.fit(X, y)
lib.fit(X, y)
assert_array_almost_equal(ncg.coef_, lib.coef_, decimal=4)
assert_array_almost_equal(lib.coef_, lbf.coef_, decimal=4)
assert_array_almost_equal(ncg.coef_, lbf.coef_, decimal=4)
assert_array_almost_equal(sag.coef_, lib.coef_, decimal=4)
assert_array_almost_equal(sag.coef_, ncg.coef_, decimal=4)
assert_array_almost_equal(sag.coef_, lbf.coef_, decimal=4)
assert_array_almost_equal(saga.coef_, sag.coef_, decimal=4)
assert_array_almost_equal(saga.coef_, lbf.coef_, decimal=4)
assert_array_almost_equal(saga.coef_, ncg.coef_, decimal=4)
assert_array_almost_equal(saga.coef_, lib.coef_, decimal=4)
def test_logistic_regressioncv_class_weights():
for weight in [{0: 0.1, 1: 0.2}, {0: 0.1, 1: 0.2, 2: 0.5}]:
n_classes = len(weight)
for class_weight in (weight, 'balanced'):
X, y = make_classification(n_samples=30, n_features=3,
n_repeated=0,
n_informative=3, n_redundant=0,
n_classes=n_classes, random_state=0)
clf_lbf = LogisticRegressionCV(solver='lbfgs', Cs=1,
fit_intercept=False,
class_weight=class_weight)
clf_ncg = LogisticRegressionCV(solver='newton-cg', Cs=1,
fit_intercept=False,
class_weight=class_weight)
clf_lib = LogisticRegressionCV(solver='liblinear', Cs=1,
fit_intercept=False,
class_weight=class_weight)
clf_sag = LogisticRegressionCV(solver='sag', Cs=1,
fit_intercept=False,
class_weight=class_weight,
tol=1e-5, max_iter=10000,
random_state=0)
clf_saga = LogisticRegressionCV(solver='saga', Cs=1,
fit_intercept=False,
class_weight=class_weight,
tol=1e-5, max_iter=10000,
random_state=0)
clf_lbf.fit(X, y)
clf_ncg.fit(X, y)
clf_lib.fit(X, y)
clf_sag.fit(X, y)
clf_saga.fit(X, y)
assert_array_almost_equal(clf_lib.coef_, clf_lbf.coef_, decimal=4)
assert_array_almost_equal(clf_ncg.coef_, clf_lbf.coef_, decimal=4)
assert_array_almost_equal(clf_sag.coef_, clf_lbf.coef_, decimal=4)
assert_array_almost_equal(clf_saga.coef_, clf_lbf.coef_, decimal=4)
def test_logistic_regression_sample_weights():
X, y = make_classification(n_samples=20, n_features=5, n_informative=3,
n_classes=2, random_state=0)
sample_weight = y + 1
for LR in [LogisticRegression, LogisticRegressionCV]:
# Test that passing sample_weight as ones is the same as
# not passing them at all (default None)
for solver in ['lbfgs', 'liblinear']:
clf_sw_none = LR(solver=solver, fit_intercept=False,
random_state=42)
clf_sw_none.fit(X, y)
clf_sw_ones = LR(solver=solver, fit_intercept=False,
random_state=42)
clf_sw_ones.fit(X, y, sample_weight=np.ones(y.shape[0]))
assert_array_almost_equal(
clf_sw_none.coef_, clf_sw_ones.coef_, decimal=4)
# Test that sample weights work the same with the lbfgs,
# newton-cg, and 'sag' solvers
clf_sw_lbfgs = LR(solver='lbfgs', fit_intercept=False, random_state=42)
clf_sw_lbfgs.fit(X, y, sample_weight=sample_weight)
clf_sw_n = LR(solver='newton-cg', fit_intercept=False, random_state=42)
clf_sw_n.fit(X, y, sample_weight=sample_weight)
clf_sw_sag = LR(solver='sag', fit_intercept=False, tol=1e-10,
random_state=42)
# ignore convergence warning due to small dataset
with ignore_warnings():
clf_sw_sag.fit(X, y, sample_weight=sample_weight)
clf_sw_liblinear = LR(solver='liblinear', fit_intercept=False,
random_state=42)
clf_sw_liblinear.fit(X, y, sample_weight=sample_weight)
assert_array_almost_equal(
clf_sw_lbfgs.coef_, clf_sw_n.coef_, decimal=4)
assert_array_almost_equal(
clf_sw_lbfgs.coef_, clf_sw_sag.coef_, decimal=4)
assert_array_almost_equal(
clf_sw_lbfgs.coef_, clf_sw_liblinear.coef_, decimal=4)
        # Test that passing class_weight={0: 1, 1: 2} is the same as
        # passing uniform class weights but adjusting sample weights
        # to be 2 for all instances of class 1
for solver in ['lbfgs', 'liblinear']:
clf_cw_12 = LR(solver=solver, fit_intercept=False,
class_weight={0: 1, 1: 2}, random_state=42)
clf_cw_12.fit(X, y)
clf_sw_12 = LR(solver=solver, fit_intercept=False, random_state=42)
clf_sw_12.fit(X, y, sample_weight=sample_weight)
assert_array_almost_equal(
clf_cw_12.coef_, clf_sw_12.coef_, decimal=4)
    # Test the above for l1 penalty and l2 penalty with dual=True,
    # since the patched liblinear code is different.
clf_cw = LogisticRegression(
solver="liblinear", fit_intercept=False, class_weight={0: 1, 1: 2},
penalty="l1", tol=1e-5, random_state=42)
clf_cw.fit(X, y)
clf_sw = LogisticRegression(
solver="liblinear", fit_intercept=False, penalty="l1", tol=1e-5,
random_state=42)
clf_sw.fit(X, y, sample_weight)
assert_array_almost_equal(clf_cw.coef_, clf_sw.coef_, decimal=4)
clf_cw = LogisticRegression(
solver="liblinear", fit_intercept=False, class_weight={0: 1, 1: 2},
penalty="l2", dual=True, random_state=42)
clf_cw.fit(X, y)
clf_sw = LogisticRegression(
solver="liblinear", fit_intercept=False, penalty="l2", dual=True,
random_state=42)
clf_sw.fit(X, y, sample_weight)
assert_array_almost_equal(clf_cw.coef_, clf_sw.coef_, decimal=4)
def _compute_class_weight_dictionary(y):
# helper for returning a dictionary instead of an array
classes = np.unique(y)
class_weight = compute_class_weight("balanced", classes, y)
class_weight_dict = dict(zip(classes, class_weight))
return class_weight_dict
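# --- Illustrative sketch (not part of the original test suite) ---
# For y = [0, 0, 0, 1], the "balanced" heuristic assigns each class the
# weight n_samples / (n_classes * class_count), so the helper above returns
# approximately {0: 4 / (2 * 3), 1: 4 / (2 * 1)} == {0: 0.67, 1: 2.0}:
def _sketch_balanced_weight_demo():
    import numpy as np
    return _compute_class_weight_dictionary(np.array([0, 0, 0, 1]))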
def test_logistic_regression_class_weights():
# Multinomial case: remove 90% of class 0
X = iris.data[45:, :]
y = iris.target[45:]
solvers = ("lbfgs", "newton-cg")
class_weight_dict = _compute_class_weight_dictionary(y)
for solver in solvers:
clf1 = LogisticRegression(solver=solver, multi_class="multinomial",
class_weight="balanced")
clf2 = LogisticRegression(solver=solver, multi_class="multinomial",
class_weight=class_weight_dict)
clf1.fit(X, y)
clf2.fit(X, y)
assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=4)
# Binary case: remove 90% of class 0 and 100% of class 2
X = iris.data[45:100, :]
y = iris.target[45:100]
solvers = ("lbfgs", "newton-cg", "liblinear")
class_weight_dict = _compute_class_weight_dictionary(y)
for solver in solvers:
clf1 = LogisticRegression(solver=solver, multi_class="ovr",
class_weight="balanced")
clf2 = LogisticRegression(solver=solver, multi_class="ovr",
class_weight=class_weight_dict)
clf1.fit(X, y)
clf2.fit(X, y)
assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6)
def test_logistic_regression_convergence_warnings():
# Test that warnings are raised if model does not converge
X, y = make_classification(n_samples=20, n_features=20, random_state=0)
clf_lib = LogisticRegression(solver='liblinear', max_iter=2, verbose=1)
assert_warns(ConvergenceWarning, clf_lib.fit, X, y)
assert_equal(clf_lib.n_iter_, 2)
def test_logistic_regression_multinomial():
# Tests for the multinomial option in logistic regression
# Some basic attributes of Logistic Regression
n_samples, n_features, n_classes = 50, 20, 3
X, y = make_classification(n_samples=n_samples,
n_features=n_features,
n_informative=10,
n_classes=n_classes, random_state=0)
    # 'lbfgs' is used as the reference solver.
solver = 'lbfgs'
ref_i = LogisticRegression(solver=solver, multi_class='multinomial')
ref_w = LogisticRegression(solver=solver, multi_class='multinomial',
fit_intercept=False)
ref_i.fit(X, y)
ref_w.fit(X, y)
assert_array_equal(ref_i.coef_.shape, (n_classes, n_features))
assert_array_equal(ref_w.coef_.shape, (n_classes, n_features))
for solver in ['sag', 'saga', 'newton-cg']:
clf_i = LogisticRegression(solver=solver, multi_class='multinomial',
random_state=42, max_iter=2000, tol=1e-7,
)
clf_w = LogisticRegression(solver=solver, multi_class='multinomial',
random_state=42, max_iter=2000, tol=1e-7,
fit_intercept=False)
clf_i.fit(X, y)
clf_w.fit(X, y)
assert_array_equal(clf_i.coef_.shape, (n_classes, n_features))
assert_array_equal(clf_w.coef_.shape, (n_classes, n_features))
# Compare solutions between lbfgs and the other solvers
assert_almost_equal(ref_i.coef_, clf_i.coef_, decimal=3)
assert_almost_equal(ref_w.coef_, clf_w.coef_, decimal=3)
assert_almost_equal(ref_i.intercept_, clf_i.intercept_, decimal=3)
    # Test that the path gives almost the same results. However, since in
    # this case we take the average of the coefs after fitting across all
    # the folds, it need not be exactly the same.
for solver in ['lbfgs', 'newton-cg', 'sag', 'saga']:
clf_path = LogisticRegressionCV(solver=solver, max_iter=2000, tol=1e-6,
multi_class='multinomial', Cs=[1.])
clf_path.fit(X, y)
assert_array_almost_equal(clf_path.coef_, ref_i.coef_, decimal=3)
assert_almost_equal(clf_path.intercept_, ref_i.intercept_, decimal=3)
def test_multinomial_grad_hess():
rng = np.random.RandomState(0)
n_samples, n_features, n_classes = 100, 5, 3
X = rng.randn(n_samples, n_features)
w = rng.rand(n_classes, n_features)
Y = np.zeros((n_samples, n_classes))
ind = np.argmax(np.dot(X, w.T), axis=1)
Y[range(0, n_samples), ind] = 1
w = w.ravel()
sample_weights = np.ones(X.shape[0])
grad, hessp = _multinomial_grad_hess(w, X, Y, alpha=1.,
sample_weight=sample_weights)
# extract first column of hessian matrix
vec = np.zeros(n_features * n_classes)
vec[0] = 1
hess_col = hessp(vec)
# Estimate hessian using least squares as done in
# test_logistic_grad_hess
e = 1e-3
d_x = np.linspace(-e, e, 30)
d_grad = np.array([
_multinomial_grad_hess(w + t * vec, X, Y, alpha=1.,
sample_weight=sample_weights)[0]
for t in d_x
])
d_grad -= d_grad.mean(axis=0)
approx_hess_col = linalg.lstsq(d_x[:, np.newaxis], d_grad)[0].ravel()
assert_array_almost_equal(hess_col, approx_hess_col)
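# --- Illustrative sketch (not part of the original test suite) ---
# The least-squares fit above estimates the slope of grad(w + t * vec) as a
# function of t, which equals the Hessian-vector product H @ vec. A hedged
# two-point finite-difference restatement (grad_func is a hypothetical
# callable returning the gradient at a point):
def _sketch_fd_hessp(grad_func, w, vec, e=1e-3):
    return (grad_func(w + e * vec) - grad_func(w - e * vec)) / (2 * e)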
def test_liblinear_decision_function_zero():
# Test negative prediction when decision_function values are zero.
# Liblinear predicts the positive class when decision_function values
# are zero. This is a test to verify that we do not do the same.
# See Issue: https://github.com/scikit-learn/scikit-learn/issues/3600
# and the PR https://github.com/scikit-learn/scikit-learn/pull/3623
X, y = make_classification(n_samples=5, n_features=5, random_state=0)
clf = LogisticRegression(fit_intercept=False)
clf.fit(X, y)
# Dummy data such that the decision function becomes zero.
X = np.zeros((5, 5))
assert_array_equal(clf.predict(X), np.zeros(5))
def test_liblinear_logregcv_sparse():
# Test LogRegCV with solver='liblinear' works for sparse matrices
X, y = make_classification(n_samples=10, n_features=5, random_state=0)
clf = LogisticRegressionCV(solver='liblinear')
clf.fit(sparse.csr_matrix(X), y)
def test_saga_sparse():
    # Test LogRegCV with solver='saga' works for sparse matrices
X, y = make_classification(n_samples=10, n_features=5, random_state=0)
clf = LogisticRegressionCV(solver='saga')
clf.fit(sparse.csr_matrix(X), y)
def test_logreg_intercept_scaling():
# Test that the right error message is thrown when intercept_scaling <= 0
for i in [-1, 0]:
clf = LogisticRegression(intercept_scaling=i)
msg = ('Intercept scaling is %r but needs to be greater than 0.'
' To disable fitting an intercept,'
' set fit_intercept=False.' % clf.intercept_scaling)
assert_raise_message(ValueError, msg, clf.fit, X, Y1)
def test_logreg_intercept_scaling_zero():
# Test that intercept_scaling is ignored when fit_intercept is False
clf = LogisticRegression(fit_intercept=False)
clf.fit(X, Y1)
assert_equal(clf.intercept_, 0.)
def test_logreg_l1():
# Because liblinear penalizes the intercept and saga does not, we do not
# fit the intercept to make it possible to compare the coefficients of
# the two models at convergence.
rng = np.random.RandomState(42)
n_samples = 50
X, y = make_classification(n_samples=n_samples, n_features=20,
random_state=0)
X_noise = rng.normal(size=(n_samples, 3))
X_constant = np.ones(shape=(n_samples, 2))
X = np.concatenate((X, X_noise, X_constant), axis=1)
lr_liblinear = LogisticRegression(penalty="l1", C=1.0, solver='liblinear',
fit_intercept=False,
tol=1e-10)
lr_liblinear.fit(X, y)
lr_saga = LogisticRegression(penalty="l1", C=1.0, solver='saga',
fit_intercept=False,
max_iter=1000, tol=1e-10)
lr_saga.fit(X, y)
assert_array_almost_equal(lr_saga.coef_, lr_liblinear.coef_)
# Noise and constant features should be regularized to zero by the l1
# penalty
assert_array_almost_equal(lr_liblinear.coef_[0, -5:], np.zeros(5))
assert_array_almost_equal(lr_saga.coef_[0, -5:], np.zeros(5))
def test_logreg_l1_sparse_data():
# Because liblinear penalizes the intercept and saga does not, we do not
# fit the intercept to make it possible to compare the coefficients of
# the two models at convergence.
rng = np.random.RandomState(42)
n_samples = 50
X, y = make_classification(n_samples=n_samples, n_features=20,
random_state=0)
X_noise = rng.normal(scale=0.1, size=(n_samples, 3))
X_constant = np.zeros(shape=(n_samples, 2))
X = np.concatenate((X, X_noise, X_constant), axis=1)
X[X < 1] = 0
X = sparse.csr_matrix(X)
lr_liblinear = LogisticRegression(penalty="l1", C=1.0, solver='liblinear',
fit_intercept=False,
tol=1e-10)
lr_liblinear.fit(X, y)
lr_saga = LogisticRegression(penalty="l1", C=1.0, solver='saga',
fit_intercept=False,
max_iter=1000, tol=1e-10)
lr_saga.fit(X, y)
assert_array_almost_equal(lr_saga.coef_, lr_liblinear.coef_)
# Noise and constant features should be regularized to zero by the l1
# penalty
assert_array_almost_equal(lr_liblinear.coef_[0, -5:], np.zeros(5))
assert_array_almost_equal(lr_saga.coef_[0, -5:], np.zeros(5))
# Check that solving on the sparse and dense data yield the same results
lr_saga_dense = LogisticRegression(penalty="l1", C=1.0, solver='saga',
fit_intercept=False,
max_iter=1000, tol=1e-10)
lr_saga_dense.fit(X.toarray(), y)
assert_array_almost_equal(lr_saga.coef_, lr_saga_dense.coef_)
def test_logreg_cv_penalty():
# Test that the correct penalty is passed to the final fit.
X, y = make_classification(n_samples=50, n_features=20, random_state=0)
lr_cv = LogisticRegressionCV(penalty="l1", Cs=[1.0], solver='liblinear')
lr_cv.fit(X, y)
lr = LogisticRegression(penalty="l1", C=1.0, solver='liblinear')
lr.fit(X, y)
assert_equal(np.count_nonzero(lr_cv.coef_), np.count_nonzero(lr.coef_))
def test_logreg_predict_proba_multinomial():
X, y = make_classification(n_samples=10, n_features=20, random_state=0,
n_classes=3, n_informative=10)
    # Predicted probabilities using the multinomial (cross-entropy) loss
    # should give a smaller log loss than those using the ovr method.
clf_multi = LogisticRegression(multi_class="multinomial", solver="lbfgs")
clf_multi.fit(X, y)
clf_multi_loss = log_loss(y, clf_multi.predict_proba(X))
clf_ovr = LogisticRegression(multi_class="ovr", solver="lbfgs")
clf_ovr.fit(X, y)
clf_ovr_loss = log_loss(y, clf_ovr.predict_proba(X))
assert_greater(clf_ovr_loss, clf_multi_loss)
# Predicted probabilities using the soft-max function should give a
# smaller loss than those using the logistic function.
clf_multi_loss = log_loss(y, clf_multi.predict_proba(X))
clf_wrong_loss = log_loss(y, clf_multi._predict_proba_lr(X))
assert_greater(clf_wrong_loss, clf_multi_loss)
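# --- Illustrative sketch (not part of the original test suite) ---
# The two probability estimates compared above differ only in how the raw
# decision values are normalized: 'multinomial' applies a softmax, while
# _predict_proba_lr applies a per-class sigmoid and renormalizes. A minimal
# numpy restatement (scores stands for decision_function() output):
def _sketch_softmax(scores):
    import numpy as np
    z = np.exp(scores - scores.max(axis=1, keepdims=True))
    return z / z.sum(axis=1, keepdims=True)
def _sketch_ovr_proba(scores):
    import numpy as np
    p = 1.0 / (1.0 + np.exp(-scores))
    return p / p.sum(axis=1, keepdims=True)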
@ignore_warnings
def test_max_iter():
    # Test that the maximum number of iterations is reached
X, y_bin = iris.data, iris.target.copy()
y_bin[y_bin == 2] = 0
solvers = ['newton-cg', 'liblinear', 'sag', 'saga', 'lbfgs']
for max_iter in range(1, 5):
for solver in solvers:
for multi_class in ['ovr', 'multinomial']:
if solver == 'liblinear' and multi_class == 'multinomial':
continue
lr = LogisticRegression(max_iter=max_iter, tol=1e-15,
multi_class=multi_class,
random_state=0, solver=solver)
lr.fit(X, y_bin)
assert_equal(lr.n_iter_[0], max_iter)
def test_n_iter():
# Test that self.n_iter_ has the correct format.
X, y = iris.data, iris.target
y_bin = y.copy()
y_bin[y_bin == 2] = 0
n_Cs = 4
n_cv_fold = 2
for solver in ['newton-cg', 'liblinear', 'sag', 'saga', 'lbfgs']:
# OvR case
n_classes = 1 if solver == 'liblinear' else np.unique(y).shape[0]
clf = LogisticRegression(tol=1e-2, multi_class='ovr',
solver=solver, C=1.,
random_state=42, max_iter=100)
clf.fit(X, y)
assert_equal(clf.n_iter_.shape, (n_classes,))
n_classes = np.unique(y).shape[0]
clf = LogisticRegressionCV(tol=1e-2, multi_class='ovr',
solver=solver, Cs=n_Cs, cv=n_cv_fold,
random_state=42, max_iter=100)
clf.fit(X, y)
assert_equal(clf.n_iter_.shape, (n_classes, n_cv_fold, n_Cs))
clf.fit(X, y_bin)
assert_equal(clf.n_iter_.shape, (1, n_cv_fold, n_Cs))
# multinomial case
n_classes = 1
        # These solvers are skipped for the multinomial part of the test;
        # use `continue` rather than `break` so that the remaining solvers
        # in the loop are still exercised.
        if solver in ('liblinear', 'sag', 'saga'):
            continue
clf = LogisticRegression(tol=1e-2, multi_class='multinomial',
solver=solver, C=1.,
random_state=42, max_iter=100)
clf.fit(X, y)
assert_equal(clf.n_iter_.shape, (n_classes,))
clf = LogisticRegressionCV(tol=1e-2, multi_class='multinomial',
solver=solver, Cs=n_Cs, cv=n_cv_fold,
random_state=42, max_iter=100)
clf.fit(X, y)
assert_equal(clf.n_iter_.shape, (n_classes, n_cv_fold, n_Cs))
clf.fit(X, y_bin)
assert_equal(clf.n_iter_.shape, (1, n_cv_fold, n_Cs))
def test_warm_start():
    # A 1-iteration second fit on the same data should give almost the same
    # result with warm starting, and a quite different result without it.
    # Warm starting does not work with the liblinear solver.
X, y = iris.data, iris.target
solvers = ['newton-cg', 'sag', 'saga', 'lbfgs']
for warm_start in [True, False]:
for fit_intercept in [True, False]:
for solver in solvers:
for multi_class in ['ovr', 'multinomial']:
clf = LogisticRegression(tol=1e-4, multi_class=multi_class,
warm_start=warm_start,
solver=solver,
random_state=42, max_iter=100,
fit_intercept=fit_intercept)
with ignore_warnings(category=ConvergenceWarning):
clf.fit(X, y)
coef_1 = clf.coef_
clf.max_iter = 1
clf.fit(X, y)
cum_diff = np.sum(np.abs(coef_1 - clf.coef_))
msg = ("Warm starting issue with %s solver in %s mode "
"with fit_intercept=%s and warm_start=%s"
% (solver, multi_class, str(fit_intercept),
str(warm_start)))
if warm_start:
assert_greater(2.0, cum_diff, msg)
else:
assert_greater(cum_diff, 2.0, msg)
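# --- Illustrative sketch (not part of the original test suite) ---
# warm_start=True makes fit() start from the previous coef_, which is why a
# 1-iteration refit barely moves above, while a cold start re-initializes the
# coefficients. A hedged usage pattern, e.g. for a regularization path:
def _sketch_warm_start_path(X, y, Cs):
    clf = LogisticRegression(solver='lbfgs', warm_start=True, max_iter=100)
    coefs = []
    for C in Cs:
        clf.set_params(C=C)
        clf.fit(X, y)  # starts from the previous C's solution
        coefs.append(clf.coef_.copy())
    return coefs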
def test_saga_vs_liblinear():
iris = load_iris()
X, y = iris.data, iris.target
X = np.concatenate([X] * 10)
y = np.concatenate([y] * 10)
X_bin = X[y <= 1]
y_bin = y[y <= 1] * 2 - 1
X_sparse, y_sparse = make_classification(n_samples=50, n_features=20,
random_state=0)
X_sparse = sparse.csr_matrix(X_sparse)
for (X, y) in ((X_bin, y_bin), (X_sparse, y_sparse)):
for penalty in ['l1', 'l2']:
n_samples = X.shape[0]
# alpha=1e-3 is time consuming
for alpha in np.logspace(-1, 1, 3):
saga = LogisticRegression(
C=1. / (n_samples * alpha),
solver='saga',
multi_class='ovr',
max_iter=200,
fit_intercept=False,
penalty=penalty, random_state=0, tol=1e-24)
liblinear = LogisticRegression(
C=1. / (n_samples * alpha),
solver='liblinear',
multi_class='ovr',
max_iter=200,
fit_intercept=False,
penalty=penalty, random_state=0, tol=1e-24)
saga.fit(X, y)
liblinear.fit(X, y)
# Convergence for alpha=1e-3 is very slow
assert_array_almost_equal(saga.coef_, liblinear.coef_, 3)
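# --- Illustrative sketch (not part of the original test suite) ---
# The C used above inverts the parametrization of the penalized objective:
# sklearn minimizes sum_i(log_loss_i) + ||w|| / C, whereas SAG-style solvers
# are usually described via the mean loss with penalty strength alpha,
# mean_i(log_loss_i) + alpha * ||w||, hence C = 1 / (n_samples * alpha):
def _sketch_alpha_to_C(alpha, n_samples):
    return 1.0 / (n_samples * alpha)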
def test_dtype_match():
# Test that np.float32 input data is not cast to np.float64 when possible
X_32 = np.array(X).astype(np.float32)
y_32 = np.array(Y1).astype(np.float32)
X_64 = np.array(X).astype(np.float64)
y_64 = np.array(Y1).astype(np.float64)
X_sparse_32 = sp.csr_matrix(X, dtype=np.float32)
for solver in ['newton-cg']:
for multi_class in ['ovr', 'multinomial']:
# Check type consistency
lr_32 = LogisticRegression(solver=solver, multi_class=multi_class)
lr_32.fit(X_32, y_32)
assert_equal(lr_32.coef_.dtype, X_32.dtype)
# check consistency with sparsity
lr_32_sparse = LogisticRegression(solver=solver,
multi_class=multi_class)
lr_32_sparse.fit(X_sparse_32, y_32)
assert_equal(lr_32_sparse.coef_.dtype, X_sparse_32.dtype)
# Check accuracy consistency
lr_64 = LogisticRegression(solver=solver, multi_class=multi_class)
lr_64.fit(X_64, y_64)
assert_equal(lr_64.coef_.dtype, X_64.dtype)
assert_almost_equal(lr_32.coef_, lr_64.coef_.astype(np.float32))
|
# As a test suite for the os module, this is woefully inadequate, but this
# does add tests for a few functions which have been determined to be more
# portable than they had been thought to be.
import asynchat
import asyncore
import codecs
import contextlib
import decimal
import errno
import fnmatch
import fractions
import itertools
import locale
import mmap
import os
import pickle
import shutil
import signal
import socket
import stat
import subprocess
import sys
import sysconfig
import tempfile
import threading
import time
import types
import unittest
import uuid
import warnings
from test import support
from test.support import os_helper
from test.support import socket_helper
from test.support import threading_helper
from test.support import warnings_helper
from platform import win32_is_iot
try:
import resource
except ImportError:
resource = None
try:
import fcntl
except ImportError:
fcntl = None
try:
import _winapi
except ImportError:
_winapi = None
try:
import pwd
all_users = [u.pw_uid for u in pwd.getpwall()]
except (ImportError, AttributeError):
all_users = []
try:
from _testcapi import INT_MAX, PY_SSIZE_T_MAX
except ImportError:
INT_MAX = PY_SSIZE_T_MAX = sys.maxsize
from test.support.script_helper import assert_python_ok
from test.support import unix_shell
from test.support.os_helper import FakePath
root_in_posix = False
if hasattr(os, 'geteuid'):
root_in_posix = (os.geteuid() == 0)
# Detect whether we're on a Linux system that uses the (now outdated
# and unmaintained) linuxthreads threading library. There's an issue
# when combining linuxthreads with a failed execv call: see
# http://bugs.python.org/issue4970.
if hasattr(sys, 'thread_info') and sys.thread_info.version:
USING_LINUXTHREADS = sys.thread_info.version.startswith("linuxthreads")
else:
USING_LINUXTHREADS = False
# Issue #14110: Some tests fail on FreeBSD if the user is in the wheel group.
HAVE_WHEEL_GROUP = sys.platform.startswith('freebsd') and os.getgid() == 0
def requires_os_func(name):
return unittest.skipUnless(hasattr(os, name), 'requires os.%s' % name)
def create_file(filename, content=b'content'):
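    # "xb": create the file exclusively (fail if it already exists) in
    # binary mode; buffering=0 writes the content unbuffered.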
with open(filename, "xb", 0) as fp:
fp.write(content)
class MiscTests(unittest.TestCase):
def test_getcwd(self):
cwd = os.getcwd()
self.assertIsInstance(cwd, str)
def test_getcwd_long_path(self):
# bpo-37412: On Linux, PATH_MAX is usually around 4096 bytes. On
# Windows, MAX_PATH is defined as 260 characters, but Windows supports
# longer path if longer paths support is enabled. Internally, the os
# module uses MAXPATHLEN which is at least 1024.
#
# Use a directory name of 200 characters to fit into Windows MAX_PATH
# limit.
#
# On Windows, the test can stop when trying to create a path longer
# than MAX_PATH if long paths support is disabled:
# see RtlAreLongPathsEnabled().
min_len = 2000 # characters
dirlen = 200 # characters
dirname = 'python_test_dir_'
dirname = dirname + ('a' * (dirlen - len(dirname)))
with tempfile.TemporaryDirectory() as tmpdir:
with os_helper.change_cwd(tmpdir) as path:
expected = path
while True:
cwd = os.getcwd()
self.assertEqual(cwd, expected)
need = min_len - (len(cwd) + len(os.path.sep))
if need <= 0:
break
if len(dirname) > need and need > 0:
dirname = dirname[:need]
path = os.path.join(path, dirname)
try:
os.mkdir(path)
# On Windows, chdir() can fail
# even if mkdir() succeeded
os.chdir(path)
except FileNotFoundError:
# On Windows, catch ERROR_PATH_NOT_FOUND (3) and
# ERROR_FILENAME_EXCED_RANGE (206) errors
# ("The filename or extension is too long")
break
except OSError as exc:
if exc.errno == errno.ENAMETOOLONG:
break
else:
raise
expected = path
if support.verbose:
print(f"Tested current directory length: {len(cwd)}")
def test_getcwdb(self):
cwd = os.getcwdb()
self.assertIsInstance(cwd, bytes)
self.assertEqual(os.fsdecode(cwd), os.getcwd())
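# --- Illustrative sketch (not part of the original test suite) ---
# The round trip asserted in test_getcwdb() above holds because str and
# bytes paths are bridged via the filesystem encoding and its error handler
# (surrogateescape/surrogatepass), which makes the conversion lossless:
def _sketch_fs_roundtrip(path_str):
    return os.fsdecode(os.fsencode(path_str)) == path_str  # True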
# Tests creating TESTFN
class FileTests(unittest.TestCase):
def setUp(self):
if os.path.lexists(os_helper.TESTFN):
os.unlink(os_helper.TESTFN)
tearDown = setUp
def test_access(self):
f = os.open(os_helper.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(f)
self.assertTrue(os.access(os_helper.TESTFN, os.W_OK))
def test_closerange(self):
first = os.open(os_helper.TESTFN, os.O_CREAT|os.O_RDWR)
# We must allocate two consecutive file descriptors, otherwise
# it will mess up other file descriptors (perhaps even the three
# standard ones).
second = os.dup(first)
try:
retries = 0
while second != first + 1:
os.close(first)
retries += 1
if retries > 10:
# XXX test skipped
self.skipTest("couldn't allocate two consecutive fds")
first, second = second, os.dup(second)
finally:
os.close(second)
# close a fd that is open, and one that isn't
os.closerange(first, first + 2)
self.assertRaises(OSError, os.write, first, b"a")
@support.cpython_only
def test_rename(self):
path = os_helper.TESTFN
old = sys.getrefcount(path)
self.assertRaises(TypeError, os.rename, path, 0)
new = sys.getrefcount(path)
self.assertEqual(old, new)
def test_read(self):
with open(os_helper.TESTFN, "w+b") as fobj:
fobj.write(b"spam")
fobj.flush()
fd = fobj.fileno()
os.lseek(fd, 0, 0)
s = os.read(fd, 4)
self.assertEqual(type(s), bytes)
self.assertEqual(s, b"spam")
@support.cpython_only
# Skip the test on 32-bit platforms: the number of bytes must fit in a
# Py_ssize_t type
@unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX,
"needs INT_MAX < PY_SSIZE_T_MAX")
@support.bigmemtest(size=INT_MAX + 10, memuse=1, dry_run=False)
def test_large_read(self, size):
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
create_file(os_helper.TESTFN, b'test')
# Issue #21932: Make sure that os.read() does not raise an
# OverflowError for size larger than INT_MAX
with open(os_helper.TESTFN, "rb") as fp:
data = os.read(fp.fileno(), size)
        # The test does not try to read more than 2 GiB at once because the
        # operating system is free to return fewer bytes than requested.
self.assertEqual(data, b'test')
def test_write(self):
# os.write() accepts bytes- and buffer-like objects but not strings
fd = os.open(os_helper.TESTFN, os.O_CREAT | os.O_WRONLY)
self.assertRaises(TypeError, os.write, fd, "beans")
os.write(fd, b"bacon\n")
os.write(fd, bytearray(b"eggs\n"))
os.write(fd, memoryview(b"spam\n"))
os.close(fd)
with open(os_helper.TESTFN, "rb") as fobj:
self.assertEqual(fobj.read().splitlines(),
[b"bacon", b"eggs", b"spam"])
def write_windows_console(self, *args):
retcode = subprocess.call(args,
# use a new console to not flood the test output
creationflags=subprocess.CREATE_NEW_CONSOLE,
# use a shell to hide the console window (SW_HIDE)
shell=True)
self.assertEqual(retcode, 0)
@unittest.skipUnless(sys.platform == 'win32',
'test specific to the Windows console')
def test_write_windows_console(self):
# Issue #11395: the Windows console returns an error (12: not enough
# space error) on writing into stdout if stdout mode is binary and the
# length is greater than 66,000 bytes (or less, depending on heap
# usage).
code = "print('x' * 100000)"
self.write_windows_console(sys.executable, "-c", code)
self.write_windows_console(sys.executable, "-u", "-c", code)
def fdopen_helper(self, *args):
fd = os.open(os_helper.TESTFN, os.O_RDONLY)
f = os.fdopen(fd, *args)
f.close()
def test_fdopen(self):
fd = os.open(os_helper.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(fd)
self.fdopen_helper()
self.fdopen_helper('r')
self.fdopen_helper('r', 100)
def test_replace(self):
TESTFN2 = os_helper.TESTFN + ".2"
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
self.addCleanup(os_helper.unlink, TESTFN2)
create_file(os_helper.TESTFN, b"1")
create_file(TESTFN2, b"2")
os.replace(os_helper.TESTFN, TESTFN2)
self.assertRaises(FileNotFoundError, os.stat, os_helper.TESTFN)
with open(TESTFN2, 'r') as f:
self.assertEqual(f.read(), "1")
def test_open_keywords(self):
f = os.open(path=__file__, flags=os.O_RDONLY, mode=0o777,
dir_fd=None)
os.close(f)
def test_symlink_keywords(self):
symlink = support.get_attribute(os, "symlink")
try:
symlink(src='target', dst=os_helper.TESTFN,
target_is_directory=False, dir_fd=None)
except (NotImplementedError, OSError):
pass # No OS support or unprivileged user
@unittest.skipUnless(hasattr(os, 'copy_file_range'), 'test needs os.copy_file_range()')
def test_copy_file_range_invalid_values(self):
with self.assertRaises(ValueError):
os.copy_file_range(0, 1, -10)
@unittest.skipUnless(hasattr(os, 'copy_file_range'), 'test needs os.copy_file_range()')
def test_copy_file_range(self):
TESTFN2 = os_helper.TESTFN + ".3"
data = b'0123456789'
create_file(os_helper.TESTFN, data)
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
in_file = open(os_helper.TESTFN, 'rb')
self.addCleanup(in_file.close)
in_fd = in_file.fileno()
out_file = open(TESTFN2, 'w+b')
self.addCleanup(os_helper.unlink, TESTFN2)
self.addCleanup(out_file.close)
out_fd = out_file.fileno()
try:
i = os.copy_file_range(in_fd, out_fd, 5)
except OSError as e:
# Handle the case in which Python was compiled
# in a system with the syscall but without support
# in the kernel.
if e.errno != errno.ENOSYS:
raise
self.skipTest(e)
else:
# The number of copied bytes can be less than
# the number of bytes originally requested.
            self.assertIn(i, range(0, 6))
with open(TESTFN2, 'rb') as in_file:
self.assertEqual(in_file.read(), data[:i])
@unittest.skipUnless(hasattr(os, 'copy_file_range'), 'test needs os.copy_file_range()')
def test_copy_file_range_offset(self):
TESTFN4 = os_helper.TESTFN + ".4"
data = b'0123456789'
bytes_to_copy = 6
in_skip = 3
out_seek = 5
create_file(os_helper.TESTFN, data)
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
in_file = open(os_helper.TESTFN, 'rb')
self.addCleanup(in_file.close)
in_fd = in_file.fileno()
out_file = open(TESTFN4, 'w+b')
self.addCleanup(os_helper.unlink, TESTFN4)
self.addCleanup(out_file.close)
out_fd = out_file.fileno()
try:
i = os.copy_file_range(in_fd, out_fd, bytes_to_copy,
offset_src=in_skip,
offset_dst=out_seek)
except OSError as e:
# Handle the case in which Python was compiled
# in a system with the syscall but without support
# in the kernel.
if e.errno != errno.ENOSYS:
raise
self.skipTest(e)
else:
# The number of copied bytes can be less than
# the number of bytes originally requested.
            self.assertIn(i, range(0, bytes_to_copy + 1))
with open(TESTFN4, 'rb') as in_file:
read = in_file.read()
                # The seeked-over bytes (out_seek of them) are zero-filled.
                self.assertEqual(read[:out_seek], b'\x00' * out_seek)
                # Bytes '012' are skipped (in_skip); '345678' is what gets
                # copied (from in_skip up to in_skip + bytes_to_copy).
self.assertEqual(read[out_seek:],
data[in_skip:in_skip+i])
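# --- Illustrative sketch (not part of the original test suite) ---
# As the two tests above note, os.copy_file_range() may copy fewer bytes
# than requested, so real callers loop until the syscall reports end of
# file. A hedged usage pattern:
def _sketch_copy_file_range_all(in_fd, out_fd, chunk=2 ** 20):
    copied = 0
    while True:
        n = os.copy_file_range(in_fd, out_fd, chunk)
        if n == 0:  # source exhausted
            return copied
        copied += n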
# Test attributes on return values from os.*stat* family.
class StatAttributeTests(unittest.TestCase):
def setUp(self):
self.fname = os_helper.TESTFN
self.addCleanup(os_helper.unlink, self.fname)
create_file(self.fname, b"ABC")
def check_stat_attributes(self, fname):
result = os.stat(fname)
# Make sure direct access works
self.assertEqual(result[stat.ST_SIZE], 3)
self.assertEqual(result.st_size, 3)
# Make sure all the attributes are there
members = dir(result)
for name in dir(stat):
if name[:3] == 'ST_':
attr = name.lower()
if name.endswith("TIME"):
def trunc(x): return int(x)
else:
def trunc(x): return x
self.assertEqual(trunc(getattr(result, attr)),
result[getattr(stat, name)])
self.assertIn(attr, members)
# Make sure that the st_?time and st_?time_ns fields roughly agree
# (they should always agree up to around tens-of-microseconds)
for name in 'st_atime st_mtime st_ctime'.split():
floaty = int(getattr(result, name) * 100000)
nanosecondy = getattr(result, name + "_ns") // 10000
self.assertAlmostEqual(floaty, nanosecondy, delta=2)
try:
result[200]
self.fail("No exception raised")
except IndexError:
pass
# Make sure that assignment fails
try:
result.st_mode = 1
self.fail("No exception raised")
except AttributeError:
pass
try:
result.st_rdev = 1
self.fail("No exception raised")
except (AttributeError, TypeError):
pass
try:
result.parrot = 1
self.fail("No exception raised")
except AttributeError:
pass
# Use the stat_result constructor with a too-short tuple.
try:
result2 = os.stat_result((10,))
self.fail("No exception raised")
except TypeError:
pass
        # Use the constructor with a too-long tuple; depending on how many
        # extra items are accepted this may or may not raise, so unlike the
        # too-short case there is no self.fail() here.
try:
result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
def test_stat_attributes(self):
self.check_stat_attributes(self.fname)
def test_stat_attributes_bytes(self):
try:
fname = self.fname.encode(sys.getfilesystemencoding())
except UnicodeEncodeError:
self.skipTest("cannot encode %a for the filesystem" % self.fname)
self.check_stat_attributes(fname)
def test_stat_result_pickle(self):
result = os.stat(self.fname)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(result, proto)
self.assertIn(b'stat_result', p)
if proto < 4:
self.assertIn(b'cos\nstat_result\n', p)
unpickled = pickle.loads(p)
self.assertEqual(result, unpickled)
@unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()')
def test_statvfs_attributes(self):
result = os.statvfs(self.fname)
# Make sure direct access works
self.assertEqual(result.f_bfree, result[3])
# Make sure all the attributes are there.
members = ('bsize', 'frsize', 'blocks', 'bfree', 'bavail', 'files',
'ffree', 'favail', 'flag', 'namemax')
for value, member in enumerate(members):
self.assertEqual(getattr(result, 'f_' + member), result[value])
self.assertTrue(isinstance(result.f_fsid, int))
# Test that the size of the tuple doesn't change
self.assertEqual(len(result), 10)
# Make sure that assignment really fails
try:
result.f_bfree = 1
self.fail("No exception raised")
except AttributeError:
pass
try:
result.parrot = 1
self.fail("No exception raised")
except AttributeError:
pass
# Use the constructor with a too-short tuple.
try:
result2 = os.statvfs_result((10,))
self.fail("No exception raised")
except TypeError:
pass
        # Use the constructor with a too-long tuple; this may or may not
        # raise, so unlike the too-short case there is no self.fail() here.
try:
result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
@unittest.skipUnless(hasattr(os, 'statvfs'),
"need os.statvfs()")
def test_statvfs_result_pickle(self):
result = os.statvfs(self.fname)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(result, proto)
self.assertIn(b'statvfs_result', p)
if proto < 4:
self.assertIn(b'cos\nstatvfs_result\n', p)
unpickled = pickle.loads(p)
self.assertEqual(result, unpickled)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
def test_1686475(self):
# Verify that an open file can be stat'ed
try:
os.stat(r"c:\pagefile.sys")
except FileNotFoundError:
self.skipTest(r'c:\pagefile.sys does not exist')
        except OSError as e:
            self.fail("Could not stat pagefile.sys: %s" % e)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_15261(self):
# Verify that stat'ing a closed fd does not cause crash
r, w = os.pipe()
try:
os.stat(r) # should not raise error
finally:
os.close(r)
os.close(w)
with self.assertRaises(OSError) as ctx:
os.stat(r)
self.assertEqual(ctx.exception.errno, errno.EBADF)
def check_file_attributes(self, result):
self.assertTrue(hasattr(result, 'st_file_attributes'))
self.assertTrue(isinstance(result.st_file_attributes, int))
self.assertTrue(0 <= result.st_file_attributes <= 0xFFFFFFFF)
@unittest.skipUnless(sys.platform == "win32",
"st_file_attributes is Win32 specific")
def test_file_attributes(self):
# test file st_file_attributes (FILE_ATTRIBUTE_DIRECTORY not set)
result = os.stat(self.fname)
self.check_file_attributes(result)
self.assertEqual(
result.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY,
0)
# test directory st_file_attributes (FILE_ATTRIBUTE_DIRECTORY set)
dirname = os_helper.TESTFN + "dir"
os.mkdir(dirname)
self.addCleanup(os.rmdir, dirname)
result = os.stat(dirname)
self.check_file_attributes(result)
self.assertEqual(
result.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY,
stat.FILE_ATTRIBUTE_DIRECTORY)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
def test_access_denied(self):
# Default to FindFirstFile WIN32_FIND_DATA when access is
# denied. See issue 28075.
# os.environ['TEMP'] should be located on a volume that
# supports file ACLs.
fname = os.path.join(os.environ['TEMP'], self.fname)
self.addCleanup(os_helper.unlink, fname)
create_file(fname, b'ABC')
# Deny the right to [S]YNCHRONIZE on the file to
# force CreateFile to fail with ERROR_ACCESS_DENIED.
DETACHED_PROCESS = 8
subprocess.check_call(
# bpo-30584: Use security identifier *S-1-5-32-545 instead
# of localized "Users" to not depend on the locale.
['icacls.exe', fname, '/deny', '*S-1-5-32-545:(S)'],
creationflags=DETACHED_PROCESS
)
result = os.stat(fname)
self.assertNotEqual(result.st_size, 0)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
def test_stat_block_device(self):
# bpo-38030: os.stat fails for block devices
# Test a filename like "//./C:"
fname = "//./" + os.path.splitdrive(os.getcwd())[0]
result = os.stat(fname)
self.assertEqual(result.st_mode, stat.S_IFBLK)
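# --- Illustrative sketch (not part of the original test suite) ---
# check_stat_attributes() above verifies that the float and nanosecond time
# fields agree: st_mtime is essentially st_mtime_ns * 1e-9, the float losing
# precision in the sub-microsecond range for current epochs.
def _sketch_stat_times(path):
    st = os.stat(path)
    return st.st_mtime, st.st_mtime_ns * 1e-9  # nearly equal floats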
class UtimeTests(unittest.TestCase):
def setUp(self):
self.dirname = os_helper.TESTFN
self.fname = os.path.join(self.dirname, "f1")
self.addCleanup(os_helper.rmtree, self.dirname)
os.mkdir(self.dirname)
create_file(self.fname)
def support_subsecond(self, filename):
        # Heuristic to check if the filesystem supports timestamps with
        # subsecond resolution: check if the float and int timestamps differ.
st = os.stat(filename)
return ((st.st_atime != st[7])
or (st.st_mtime != st[8])
or (st.st_ctime != st[9]))
def _test_utime(self, set_time, filename=None):
if not filename:
filename = self.fname
support_subsecond = self.support_subsecond(filename)
if support_subsecond:
# Timestamp with a resolution of 1 microsecond (10^-6).
#
# The resolution of the C internal function used by os.utime()
# depends on the platform: 1 sec, 1 us, 1 ns. Writing a portable
# test with a resolution of 1 ns requires more work:
# see the issue #15745.
atime_ns = 1002003000 # 1.002003 seconds
mtime_ns = 4005006000 # 4.005006 seconds
else:
# use a resolution of 1 second
atime_ns = 5 * 10**9
mtime_ns = 8 * 10**9
set_time(filename, (atime_ns, mtime_ns))
st = os.stat(filename)
if support_subsecond:
self.assertAlmostEqual(st.st_atime, atime_ns * 1e-9, delta=1e-6)
self.assertAlmostEqual(st.st_mtime, mtime_ns * 1e-9, delta=1e-6)
else:
self.assertEqual(st.st_atime, atime_ns * 1e-9)
self.assertEqual(st.st_mtime, mtime_ns * 1e-9)
self.assertEqual(st.st_atime_ns, atime_ns)
self.assertEqual(st.st_mtime_ns, mtime_ns)
def test_utime(self):
def set_time(filename, ns):
# test the ns keyword parameter
os.utime(filename, ns=ns)
self._test_utime(set_time)
@staticmethod
def ns_to_sec(ns):
        # Convert a number of nanoseconds (int) to a number of seconds
        # (float). Round towards infinity by adding 0.5 nanosecond to avoid
        # rounding issues: os.utime() rounds towards minus infinity.
return (ns * 1e-9) + 0.5e-9
def test_utime_by_indexed(self):
# pass times as floating point seconds as the second indexed parameter
def set_time(filename, ns):
atime_ns, mtime_ns = ns
atime = self.ns_to_sec(atime_ns)
mtime = self.ns_to_sec(mtime_ns)
# test utimensat(timespec), utimes(timeval), utime(utimbuf)
# or utime(time_t)
os.utime(filename, (atime, mtime))
self._test_utime(set_time)
def test_utime_by_times(self):
def set_time(filename, ns):
atime_ns, mtime_ns = ns
atime = self.ns_to_sec(atime_ns)
mtime = self.ns_to_sec(mtime_ns)
# test the times keyword parameter
os.utime(filename, times=(atime, mtime))
self._test_utime(set_time)
@unittest.skipUnless(os.utime in os.supports_follow_symlinks,
"follow_symlinks support for utime required "
"for this test.")
def test_utime_nofollow_symlinks(self):
def set_time(filename, ns):
# use follow_symlinks=False to test utimensat(timespec)
# or lutimes(timeval)
os.utime(filename, ns=ns, follow_symlinks=False)
self._test_utime(set_time)
@unittest.skipUnless(os.utime in os.supports_fd,
"fd support for utime required for this test.")
def test_utime_fd(self):
def set_time(filename, ns):
with open(filename, 'wb', 0) as fp:
# use a file descriptor to test futimens(timespec)
# or futimes(timeval)
os.utime(fp.fileno(), ns=ns)
self._test_utime(set_time)
@unittest.skipUnless(os.utime in os.supports_dir_fd,
"dir_fd support for utime required for this test.")
def test_utime_dir_fd(self):
def set_time(filename, ns):
dirname, name = os.path.split(filename)
dirfd = os.open(dirname, os.O_RDONLY)
try:
# pass dir_fd to test utimensat(timespec) or futimesat(timeval)
os.utime(name, dir_fd=dirfd, ns=ns)
finally:
os.close(dirfd)
self._test_utime(set_time)
def test_utime_directory(self):
def set_time(filename, ns):
# test calling os.utime() on a directory
os.utime(filename, ns=ns)
self._test_utime(set_time, filename=self.dirname)
def _test_utime_current(self, set_time):
# Get the system clock
current = time.time()
# Call os.utime() to set the timestamp to the current system clock
set_time(self.fname)
if not self.support_subsecond(self.fname):
delta = 1.0
else:
# On Windows, the usual resolution of time.time() is 15.6 ms.
# bpo-30649: Tolerate 50 ms for slow Windows buildbots.
#
# x86 Gentoo Refleaks 3.x once failed with dt=20.2 ms. So use
# also 50 ms on other platforms.
delta = 0.050
st = os.stat(self.fname)
msg = ("st_time=%r, current=%r, dt=%r"
% (st.st_mtime, current, st.st_mtime - current))
self.assertAlmostEqual(st.st_mtime, current,
delta=delta, msg=msg)
def test_utime_current(self):
def set_time(filename):
# Set to the current time in the new way
os.utime(self.fname)
self._test_utime_current(set_time)
def test_utime_current_old(self):
def set_time(filename):
# Set to the current time in the old explicit way.
os.utime(self.fname, None)
self._test_utime_current(set_time)
def get_file_system(self, path):
if sys.platform == 'win32':
root = os.path.splitdrive(os.path.abspath(path))[0] + '\\'
import ctypes
kernel32 = ctypes.windll.kernel32
buf = ctypes.create_unicode_buffer("", 100)
ok = kernel32.GetVolumeInformationW(root, None, 0,
None, None, None,
buf, len(buf))
if ok:
return buf.value
# return None if the filesystem is unknown
def test_large_time(self):
        # Many filesystems are limited to the year 2038. At least the test
        # passes with the NTFS filesystem.
if self.get_file_system(self.dirname) != "NTFS":
self.skipTest("requires NTFS")
large = 5000000000 # some day in 2128
os.utime(self.fname, (large, large))
self.assertEqual(os.stat(self.fname).st_mtime, large)
def test_utime_invalid_arguments(self):
# seconds and nanoseconds parameters are mutually exclusive
with self.assertRaises(ValueError):
os.utime(self.fname, (5, 5), ns=(5, 5))
with self.assertRaises(TypeError):
os.utime(self.fname, [5, 5])
with self.assertRaises(TypeError):
os.utime(self.fname, (5,))
with self.assertRaises(TypeError):
os.utime(self.fname, (5, 5, 5))
with self.assertRaises(TypeError):
os.utime(self.fname, ns=[5, 5])
with self.assertRaises(TypeError):
os.utime(self.fname, ns=(5,))
with self.assertRaises(TypeError):
os.utime(self.fname, ns=(5, 5, 5))
if os.utime not in os.supports_follow_symlinks:
with self.assertRaises(NotImplementedError):
os.utime(self.fname, (5, 5), follow_symlinks=False)
if os.utime not in os.supports_fd:
with open(self.fname, 'wb', 0) as fp:
with self.assertRaises(TypeError):
os.utime(fp.fileno(), (5, 5))
if os.utime not in os.supports_dir_fd:
with self.assertRaises(NotImplementedError):
os.utime(self.fname, (5, 5), dir_fd=0)
@support.cpython_only
def test_issue31577(self):
# The interpreter shouldn't crash in case utime() received a bad
# ns argument.
def get_bad_int(divmod_ret_val):
class BadInt:
def __divmod__(*args):
return divmod_ret_val
return BadInt()
with self.assertRaises(TypeError):
os.utime(self.fname, ns=(get_bad_int(42), 1))
with self.assertRaises(TypeError):
os.utime(self.fname, ns=(get_bad_int(()), 1))
with self.assertRaises(TypeError):
os.utime(self.fname, ns=(get_bad_int((1, 2, 3)), 1))
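# --- Illustrative sketch (not part of the original test suite) ---
# Why UtimeTests.ns_to_sec() biases upward by half a nanosecond: floats
# cannot represent most nanosecond counts exactly, and os.utime() rounds
# towards minus infinity when converting back, so an unbiased float can land
# one nanosecond short. The bias keeps the round trip exact for the
# magnitudes used in these tests:
def _sketch_ns_roundtrip(ns):
    sec = (ns * 1e-9) + 0.5e-9
    return int(sec * 10 ** 9)  # == ns for small timestamps like 4005006000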
from test import mapping_tests
class EnvironTests(mapping_tests.BasicTestMappingProtocol):
"""check that os.environ object conform to mapping protocol"""
type2test = None
def setUp(self):
self.__save = dict(os.environ)
if os.supports_bytes_environ:
self.__saveb = dict(os.environb)
for key, value in self._reference().items():
os.environ[key] = value
def tearDown(self):
os.environ.clear()
os.environ.update(self.__save)
if os.supports_bytes_environ:
os.environb.clear()
os.environb.update(self.__saveb)
def _reference(self):
return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"}
def _empty_mapping(self):
os.environ.clear()
return os.environ
# Bug 1110478
@unittest.skipUnless(unix_shell and os.path.exists(unix_shell),
'requires a shell')
def test_update2(self):
os.environ.clear()
os.environ.update(HELLO="World")
with os.popen("%s -c 'echo $HELLO'" % unix_shell) as popen:
value = popen.read().strip()
self.assertEqual(value, "World")
@unittest.skipUnless(unix_shell and os.path.exists(unix_shell),
'requires a shell')
def test_os_popen_iter(self):
with os.popen("%s -c 'echo \"line1\nline2\nline3\"'"
% unix_shell) as popen:
it = iter(popen)
self.assertEqual(next(it), "line1\n")
self.assertEqual(next(it), "line2\n")
self.assertEqual(next(it), "line3\n")
self.assertRaises(StopIteration, next, it)
# Verify environ keys and values from the OS are of the
# correct str type.
def test_keyvalue_types(self):
for key, val in os.environ.items():
self.assertEqual(type(key), str)
self.assertEqual(type(val), str)
def test_items(self):
for key, value in self._reference().items():
self.assertEqual(os.environ.get(key), value)
# Issue 7310
def test___repr__(self):
"""Check that the repr() of os.environ looks like environ({...})."""
env = os.environ
self.assertEqual(repr(env), 'environ({{{}}})'.format(', '.join(
'{!r}: {!r}'.format(key, value)
for key, value in env.items())))
def test_get_exec_path(self):
defpath_list = os.defpath.split(os.pathsep)
test_path = ['/monty', '/python', '', '/flying/circus']
test_env = {'PATH': os.pathsep.join(test_path)}
saved_environ = os.environ
try:
os.environ = dict(test_env)
# Test that defaulting to os.environ works.
self.assertSequenceEqual(test_path, os.get_exec_path())
self.assertSequenceEqual(test_path, os.get_exec_path(env=None))
finally:
os.environ = saved_environ
# No PATH environment variable
self.assertSequenceEqual(defpath_list, os.get_exec_path({}))
# Empty PATH environment variable
self.assertSequenceEqual(('',), os.get_exec_path({'PATH':''}))
# Supplied PATH environment variable
self.assertSequenceEqual(test_path, os.get_exec_path(test_env))
if os.supports_bytes_environ:
# env cannot contain 'PATH' and b'PATH' keys
try:
# ignore BytesWarning warning
with warnings.catch_warnings(record=True):
mixed_env = {'PATH': '1', b'PATH': b'2'}
except BytesWarning:
# mixed_env cannot be created with python -bb
pass
else:
self.assertRaises(ValueError, os.get_exec_path, mixed_env)
# bytes key and/or value
self.assertSequenceEqual(os.get_exec_path({b'PATH': b'abc'}),
['abc'])
self.assertSequenceEqual(os.get_exec_path({b'PATH': 'abc'}),
['abc'])
self.assertSequenceEqual(os.get_exec_path({'PATH': b'abc'}),
['abc'])
@unittest.skipUnless(os.supports_bytes_environ,
"os.environb required for this test.")
def test_environb(self):
# os.environ -> os.environb
value = 'euro\u20ac'
try:
value_bytes = value.encode(sys.getfilesystemencoding(),
'surrogateescape')
except UnicodeEncodeError:
msg = "U+20AC character is not encodable to %s" % (
sys.getfilesystemencoding(),)
self.skipTest(msg)
os.environ['unicode'] = value
self.assertEqual(os.environ['unicode'], value)
self.assertEqual(os.environb[b'unicode'], value_bytes)
# os.environb -> os.environ
value = b'\xff'
os.environb[b'bytes'] = value
self.assertEqual(os.environb[b'bytes'], value)
value_str = value.decode(sys.getfilesystemencoding(), 'surrogateescape')
self.assertEqual(os.environ['bytes'], value_str)
def test_putenv_unsetenv(self):
name = "PYTHONTESTVAR"
value = "testvalue"
code = f'import os; print(repr(os.environ.get({name!r})))'
with os_helper.EnvironmentVarGuard() as env:
env.pop(name, None)
os.putenv(name, value)
proc = subprocess.run([sys.executable, '-c', code], check=True,
stdout=subprocess.PIPE, text=True)
self.assertEqual(proc.stdout.rstrip(), repr(value))
os.unsetenv(name)
proc = subprocess.run([sys.executable, '-c', code], check=True,
stdout=subprocess.PIPE, text=True)
self.assertEqual(proc.stdout.rstrip(), repr(None))
# On OS X < 10.6, unsetenv() doesn't return a value (bpo-13415).
@support.requires_mac_ver(10, 6)
def test_putenv_unsetenv_error(self):
# Empty variable name is invalid.
# "=" and null character are not allowed in a variable name.
for name in ('', '=name', 'na=me', 'name=', 'name\0', 'na\0me'):
self.assertRaises((OSError, ValueError), os.putenv, name, "value")
self.assertRaises((OSError, ValueError), os.unsetenv, name)
if sys.platform == "win32":
# On Windows, an environment variable string ("name=value" string)
# is limited to 32,767 characters
longstr = 'x' * 32_768
self.assertRaises(ValueError, os.putenv, longstr, "1")
self.assertRaises(ValueError, os.putenv, "X", longstr)
self.assertRaises(ValueError, os.unsetenv, longstr)
def test_key_type(self):
missing = 'missingkey'
self.assertNotIn(missing, os.environ)
with self.assertRaises(KeyError) as cm:
os.environ[missing]
self.assertIs(cm.exception.args[0], missing)
self.assertTrue(cm.exception.__suppress_context__)
with self.assertRaises(KeyError) as cm:
del os.environ[missing]
self.assertIs(cm.exception.args[0], missing)
self.assertTrue(cm.exception.__suppress_context__)
def _test_environ_iteration(self, collection):
iterator = iter(collection)
new_key = "__new_key__"
next(iterator) # start iteration over os.environ.items
# add a new key in os.environ mapping
os.environ[new_key] = "test_environ_iteration"
try:
next(iterator) # force iteration over modified mapping
self.assertEqual(os.environ[new_key], "test_environ_iteration")
finally:
del os.environ[new_key]
def test_iter_error_when_changing_os_environ(self):
self._test_environ_iteration(os.environ)
def test_iter_error_when_changing_os_environ_items(self):
self._test_environ_iteration(os.environ.items())
def test_iter_error_when_changing_os_environ_values(self):
self._test_environ_iteration(os.environ.values())
def _test_underlying_process_env(self, var, expected):
if not (unix_shell and os.path.exists(unix_shell)):
return
with os.popen(f"{unix_shell} -c 'echo ${var}'") as popen:
value = popen.read().strip()
self.assertEqual(expected, value)
def test_or_operator(self):
overridden_key = '_TEST_VAR_'
original_value = 'original_value'
os.environ[overridden_key] = original_value
new_vars_dict = {'_A_': '1', '_B_': '2', overridden_key: '3'}
expected = dict(os.environ)
expected.update(new_vars_dict)
actual = os.environ | new_vars_dict
self.assertDictEqual(expected, actual)
self.assertEqual('3', actual[overridden_key])
new_vars_items = new_vars_dict.items()
self.assertIs(NotImplemented, os.environ.__or__(new_vars_items))
self._test_underlying_process_env('_A_', '')
self._test_underlying_process_env(overridden_key, original_value)
def test_ior_operator(self):
overridden_key = '_TEST_VAR_'
os.environ[overridden_key] = 'original_value'
new_vars_dict = {'_A_': '1', '_B_': '2', overridden_key: '3'}
expected = dict(os.environ)
expected.update(new_vars_dict)
os.environ |= new_vars_dict
self.assertEqual(expected, os.environ)
self.assertEqual('3', os.environ[overridden_key])
self._test_underlying_process_env('_A_', '1')
self._test_underlying_process_env(overridden_key, '3')
def test_ior_operator_invalid_dicts(self):
os_environ_copy = os.environ.copy()
with self.assertRaises(TypeError):
dict_with_bad_key = {1: '_A_'}
os.environ |= dict_with_bad_key
with self.assertRaises(TypeError):
dict_with_bad_val = {'_A_': 1}
os.environ |= dict_with_bad_val
# Check nothing was added.
self.assertEqual(os_environ_copy, os.environ)
def test_ior_operator_key_value_iterable(self):
overridden_key = '_TEST_VAR_'
os.environ[overridden_key] = 'original_value'
new_vars_items = (('_A_', '1'), ('_B_', '2'), (overridden_key, '3'))
expected = dict(os.environ)
expected.update(new_vars_items)
os.environ |= new_vars_items
self.assertEqual(expected, os.environ)
self.assertEqual('3', os.environ[overridden_key])
self._test_underlying_process_env('_A_', '1')
self._test_underlying_process_env(overridden_key, '3')
def test_ror_operator(self):
overridden_key = '_TEST_VAR_'
original_value = 'original_value'
os.environ[overridden_key] = original_value
new_vars_dict = {'_A_': '1', '_B_': '2', overridden_key: '3'}
expected = dict(new_vars_dict)
expected.update(os.environ)
actual = new_vars_dict | os.environ
self.assertDictEqual(expected, actual)
self.assertEqual(original_value, actual[overridden_key])
new_vars_items = new_vars_dict.items()
self.assertIs(NotImplemented, os.environ.__ror__(new_vars_items))
self._test_underlying_process_env('_A_', '')
self._test_underlying_process_env(overridden_key, original_value)
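# --- Illustrative sketch (not part of the original test suite) ---
# test_putenv_unsetenv() above observes the variable from a child process
# because os.putenv() and os.unsetenv() change only the process environment,
# not the os.environ mapping; assigning through os.environ updates both:
def _sketch_putenv_vs_environ():
    os.putenv('_DEMO_VAR_', '1')           # child processes would see it,
    seen = '_DEMO_VAR_' in os.environ      # but the mapping does not (False)
    os.unsetenv('_DEMO_VAR_')
    os.environ['_DEMO_VAR2_'] = '1'        # visible to both
    del os.environ['_DEMO_VAR2_']
    return seen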
class WalkTests(unittest.TestCase):
"""Tests for os.walk()."""
    # Wrapper to hide minor differences between os.walk and os.fwalk
    # so that both functions can be tested with the same code base.
def walk(self, top, **kwargs):
if 'follow_symlinks' in kwargs:
kwargs['followlinks'] = kwargs.pop('follow_symlinks')
return os.walk(top, **kwargs)
def setUp(self):
join = os.path.join
self.addCleanup(os_helper.rmtree, os_helper.TESTFN)
# Build:
# TESTFN/
# TEST1/ a file kid and two directory kids
# tmp1
# SUB1/ a file kid and a directory kid
# tmp2
# SUB11/ no kids
# SUB2/ a file kid and a dirsymlink kid
# tmp3
# SUB21/ not readable
# tmp5
        #           link/          a symlink to TEST2
# broken_link
# broken_link2
# broken_link3
# TEST2/
# tmp4 a lone file
self.walk_path = join(os_helper.TESTFN, "TEST1")
self.sub1_path = join(self.walk_path, "SUB1")
self.sub11_path = join(self.sub1_path, "SUB11")
sub2_path = join(self.walk_path, "SUB2")
sub21_path = join(sub2_path, "SUB21")
tmp1_path = join(self.walk_path, "tmp1")
tmp2_path = join(self.sub1_path, "tmp2")
tmp3_path = join(sub2_path, "tmp3")
tmp5_path = join(sub21_path, "tmp3")
self.link_path = join(sub2_path, "link")
t2_path = join(os_helper.TESTFN, "TEST2")
tmp4_path = join(os_helper.TESTFN, "TEST2", "tmp4")
broken_link_path = join(sub2_path, "broken_link")
broken_link2_path = join(sub2_path, "broken_link2")
broken_link3_path = join(sub2_path, "broken_link3")
# Create stuff.
os.makedirs(self.sub11_path)
os.makedirs(sub2_path)
os.makedirs(sub21_path)
os.makedirs(t2_path)
for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path, tmp5_path:
with open(path, "x", encoding='utf-8') as f:
f.write("I'm " + path + " and proud of it. Blame test_os.\n")
if os_helper.can_symlink():
os.symlink(os.path.abspath(t2_path), self.link_path)
os.symlink('broken', broken_link_path, True)
os.symlink(join('tmp3', 'broken'), broken_link2_path, True)
os.symlink(join('SUB21', 'tmp5'), broken_link3_path, True)
self.sub2_tree = (sub2_path, ["SUB21", "link"],
["broken_link", "broken_link2", "broken_link3",
"tmp3"])
else:
self.sub2_tree = (sub2_path, ["SUB21"], ["tmp3"])
os.chmod(sub21_path, 0)
try:
os.listdir(sub21_path)
except PermissionError:
self.addCleanup(os.chmod, sub21_path, stat.S_IRWXU)
else:
os.chmod(sub21_path, stat.S_IRWXU)
os.unlink(tmp5_path)
os.rmdir(sub21_path)
del self.sub2_tree[1][:1]
def test_walk_topdown(self):
# Walk top-down.
all = list(self.walk(self.walk_path))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: TESTFN, SUB1, SUB11, SUB2
# flipped: TESTFN, SUB2, SUB1, SUB11
flipped = all[0][1][0] != "SUB1"
all[0][1].sort()
all[3 - 2 * flipped][-1].sort()
all[3 - 2 * flipped][1].sort()
self.assertEqual(all[0], (self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[1 + flipped], (self.sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 + flipped], (self.sub11_path, [], []))
self.assertEqual(all[3 - 2 * flipped], self.sub2_tree)
def test_walk_prune(self, walk_path=None):
if walk_path is None:
walk_path = self.walk_path
# Prune the search.
all = []
for root, dirs, files in self.walk(walk_path):
all.append((root, dirs, files))
# Don't descend into SUB1.
if 'SUB1' in dirs:
# Note that this also mutates the dirs we appended to all!
dirs.remove('SUB1')
self.assertEqual(len(all), 2)
self.assertEqual(all[0], (self.walk_path, ["SUB2"], ["tmp1"]))
all[1][-1].sort()
all[1][1].sort()
self.assertEqual(all[1], self.sub2_tree)
def test_file_like_path(self):
self.test_walk_prune(FakePath(self.walk_path))
def test_walk_bottom_up(self):
# Walk bottom-up.
all = list(self.walk(self.walk_path, topdown=False))
self.assertEqual(len(all), 4, all)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: SUB11, SUB1, SUB2, TESTFN
# flipped: SUB2, SUB11, SUB1, TESTFN
flipped = all[3][1][0] != "SUB1"
all[3][1].sort()
all[2 - 2 * flipped][-1].sort()
all[2 - 2 * flipped][1].sort()
self.assertEqual(all[3],
(self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[flipped],
(self.sub11_path, [], []))
self.assertEqual(all[flipped + 1],
(self.sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 - 2 * flipped],
self.sub2_tree)
def test_walk_symlink(self):
if not os_helper.can_symlink():
self.skipTest("need symlink support")
# Walk, following symlinks.
walk_it = self.walk(self.walk_path, follow_symlinks=True)
for root, dirs, files in walk_it:
if root == self.link_path:
self.assertEqual(dirs, [])
self.assertEqual(files, ["tmp4"])
break
else:
self.fail("Didn't follow symlink with followlinks=True")
def test_walk_bad_dir(self):
# Walk top-down.
errors = []
walk_it = self.walk(self.walk_path, onerror=errors.append)
root, dirs, files = next(walk_it)
self.assertEqual(errors, [])
dir1 = 'SUB1'
path1 = os.path.join(root, dir1)
path1new = os.path.join(root, dir1 + '.new')
os.rename(path1, path1new)
try:
roots = [r for r, d, f in walk_it]
self.assertTrue(errors)
self.assertNotIn(path1, roots)
self.assertNotIn(path1new, roots)
for dir2 in dirs:
if dir2 != dir1:
self.assertIn(os.path.join(root, dir2), roots)
finally:
os.rename(path1new, path1)
def test_walk_many_open_files(self):
depth = 30
base = os.path.join(os_helper.TESTFN, 'deep')
p = os.path.join(base, *(['d']*depth))
os.makedirs(p)
iters = [self.walk(base, topdown=False) for j in range(100)]
for i in range(depth + 1):
expected = (p, ['d'] if i else [], [])
for it in iters:
self.assertEqual(next(it), expected)
p = os.path.dirname(p)
iters = [self.walk(base, topdown=True) for j in range(100)]
p = base
for i in range(depth + 1):
expected = (p, ['d'] if i < depth else [], [])
for it in iters:
self.assertEqual(next(it), expected)
p = os.path.join(p, 'd')
@unittest.skipUnless(hasattr(os, 'fwalk'), "Test needs os.fwalk()")
class FwalkTests(WalkTests):
"""Tests for os.fwalk()."""
def walk(self, top, **kwargs):
for root, dirs, files, root_fd in self.fwalk(top, **kwargs):
yield (root, dirs, files)
def fwalk(self, *args, **kwargs):
return os.fwalk(*args, **kwargs)
def _compare_to_walk(self, walk_kwargs, fwalk_kwargs):
"""
Compare os.fwalk() results with os.walk() results.
"""
walk_kwargs = walk_kwargs.copy()
fwalk_kwargs = fwalk_kwargs.copy()
for topdown, follow_symlinks in itertools.product((True, False), repeat=2):
walk_kwargs.update(topdown=topdown, followlinks=follow_symlinks)
fwalk_kwargs.update(topdown=topdown, follow_symlinks=follow_symlinks)
expected = {}
for root, dirs, files in os.walk(**walk_kwargs):
expected[root] = (set(dirs), set(files))
for root, dirs, files, rootfd in self.fwalk(**fwalk_kwargs):
self.assertIn(root, expected)
self.assertEqual(expected[root], (set(dirs), set(files)))
def test_compare_to_walk(self):
kwargs = {'top': os_helper.TESTFN}
self._compare_to_walk(kwargs, kwargs)
def test_dir_fd(self):
try:
fd = os.open(".", os.O_RDONLY)
walk_kwargs = {'top': os_helper.TESTFN}
fwalk_kwargs = walk_kwargs.copy()
fwalk_kwargs['dir_fd'] = fd
self._compare_to_walk(walk_kwargs, fwalk_kwargs)
finally:
os.close(fd)
def test_yields_correct_dir_fd(self):
# check returned file descriptors
for topdown, follow_symlinks in itertools.product((True, False), repeat=2):
args = os_helper.TESTFN, topdown, None
for root, dirs, files, rootfd in self.fwalk(*args, follow_symlinks=follow_symlinks):
# check that the FD is valid
os.fstat(rootfd)
# redundant check
os.stat(rootfd)
# check that listdir() returns consistent information
self.assertEqual(set(os.listdir(rootfd)), set(dirs) | set(files))
def test_fd_leak(self):
# Since we're opening a lot of FDs, we must be careful to avoid leaks:
# we check both that calling fwalk() a large number of times doesn't
# raise EMFILE and that the minimum allocated FD hasn't changed.
minfd = os.dup(1)
os.close(minfd)
for i in range(256):
for x in self.fwalk(os_helper.TESTFN):
pass
newfd = os.dup(1)
self.addCleanup(os.close, newfd)
self.assertEqual(newfd, minfd)
# fwalk() keeps file descriptors open
test_walk_many_open_files = None
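# Illustrative sketch (not part of the test suite): the extra ``rootfd``
# yielded by os.fwalk() can be passed as ``dir_fd`` to dir_fd-aware functions
# such as os.stat(), so each entry is resolved relative to the directory that
# was actually walked instead of re-resolving its full path. A minimal
# sketch, assuming os.fwalk() is available (the tests above guard on this):
def _example_fwalk_total_size(top):
    total = 0
    for root, dirs, files, rootfd in os.fwalk(top):
        for name in files:
            # Resolve ``name`` relative to the walked directory's descriptor.
            total += os.stat(name, dir_fd=rootfd,
                             follow_symlinks=False).st_size
    return total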
class BytesWalkTests(WalkTests):
"""Tests for os.walk() with bytes."""
def walk(self, top, **kwargs):
if 'follow_symlinks' in kwargs:
kwargs['followlinks'] = kwargs.pop('follow_symlinks')
for broot, bdirs, bfiles in os.walk(os.fsencode(top), **kwargs):
root = os.fsdecode(broot)
dirs = list(map(os.fsdecode, bdirs))
files = list(map(os.fsdecode, bfiles))
yield (root, dirs, files)
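# Write any mutations the caller made (e.g. pruning ``dirs``) back into
# the bytes lists so that they take effect in the underlying os.walk().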
bdirs[:] = list(map(os.fsencode, dirs))
bfiles[:] = list(map(os.fsencode, files))
@unittest.skipUnless(hasattr(os, 'fwalk'), "Test needs os.fwalk()")
class BytesFwalkTests(FwalkTests):
"""Tests for os.walk() with bytes."""
def fwalk(self, top='.', *args, **kwargs):
for broot, bdirs, bfiles, topfd in os.fwalk(os.fsencode(top), *args, **kwargs):
root = os.fsdecode(broot)
dirs = list(map(os.fsdecode, bdirs))
files = list(map(os.fsdecode, bfiles))
yield (root, dirs, files, topfd)
bdirs[:] = list(map(os.fsencode, dirs))
bfiles[:] = list(map(os.fsencode, files))
class MakedirTests(unittest.TestCase):
def setUp(self):
os.mkdir(os_helper.TESTFN)
def test_makedir(self):
base = os_helper.TESTFN
path = os.path.join(base, 'dir1', 'dir2', 'dir3')
os.makedirs(path) # Should work
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4')
os.makedirs(path)
# Try paths with a '.' in them
self.assertRaises(OSError, os.makedirs, os.curdir)
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir)
os.makedirs(path)
path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4',
'dir5', 'dir6')
os.makedirs(path)
def test_mode(self):
with os_helper.temp_umask(0o002):
base = os_helper.TESTFN
parent = os.path.join(base, 'dir1')
path = os.path.join(parent, 'dir2')
os.makedirs(path, 0o555)
self.assertTrue(os.path.exists(path))
self.assertTrue(os.path.isdir(path))
if os.name != 'nt':
self.assertEqual(os.stat(path).st_mode & 0o777, 0o555)
self.assertEqual(os.stat(parent).st_mode & 0o777, 0o775)
def test_exist_ok_existing_directory(self):
path = os.path.join(os_helper.TESTFN, 'dir1')
mode = 0o777
old_mask = os.umask(0o022)
os.makedirs(path, mode)
self.assertRaises(OSError, os.makedirs, path, mode)
self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False)
os.makedirs(path, 0o776, exist_ok=True)
os.makedirs(path, mode=mode, exist_ok=True)
os.umask(old_mask)
# Issue #25583: A drive root could raise PermissionError on Windows
os.makedirs(os.path.abspath('/'), exist_ok=True)
def test_exist_ok_s_isgid_directory(self):
path = os.path.join(os_helper.TESTFN, 'dir1')
S_ISGID = stat.S_ISGID
mode = 0o777
old_mask = os.umask(0o022)
try:
existing_testfn_mode = stat.S_IMODE(
os.lstat(os_helper.TESTFN).st_mode)
try:
os.chmod(os_helper.TESTFN, existing_testfn_mode | S_ISGID)
except PermissionError:
raise unittest.SkipTest('Cannot set S_ISGID for dir.')
if (os.lstat(os_helper.TESTFN).st_mode & S_ISGID != S_ISGID):
raise unittest.SkipTest('No support for S_ISGID dir mode.')
# The OS should apply S_ISGID from the parent dir for us, but
# this test need not depend on that behavior. Be explicit.
os.makedirs(path, mode | S_ISGID)
# http://bugs.python.org/issue14992
# Should not fail when the bit is already set.
os.makedirs(path, mode, exist_ok=True)
# remove the bit.
os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID)
# Should also succeed when the bit is demanded but not already set.
os.makedirs(path, mode | S_ISGID, exist_ok=True)
finally:
os.umask(old_mask)
def test_exist_ok_existing_regular_file(self):
base = os_helper.TESTFN
path = os.path.join(os_helper.TESTFN, 'dir1')
with open(path, 'w') as f:
f.write('abc')
self.assertRaises(OSError, os.makedirs, path)
self.assertRaises(OSError, os.makedirs, path, exist_ok=False)
self.assertRaises(OSError, os.makedirs, path, exist_ok=True)
os.remove(path)
def tearDown(self):
path = os.path.join(os_helper.TESTFN, 'dir1', 'dir2', 'dir3',
'dir4', 'dir5', 'dir6')
# If the tests failed, the bottom-most directory ('../dir6')
# may not have been created, so we look for the deepest directory
# that exists.
while not os.path.exists(path) and path != os_helper.TESTFN:
path = os.path.dirname(path)
os.removedirs(path)
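# Illustrative sketch (not part of the test suite): os.makedirs() creates all
# missing intermediate directories; ``mode`` is applied to the leaf directory
# and is still filtered by the process umask (as test_mode() above checks),
# and exist_ok=True only suppresses the error when the leaf already exists as
# a directory. ``base`` is a hypothetical writable directory.
def _example_makedirs(base):
    leaf = os.path.join(base, 'a', 'b', 'c')
    os.makedirs(leaf, mode=0o755)      # creates a, b and c as needed
    os.makedirs(leaf, exist_ok=True)   # no error: leaf is already a directory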
@unittest.skipUnless(hasattr(os, 'chown'), "Test needs chown")
class ChownFileTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.mkdir(os_helper.TESTFN)
def test_chown_uid_gid_arguments_must_be_index(self):
stat = os.stat(os_helper.TESTFN)
uid = stat.st_uid
gid = stat.st_gid
for value in (-1.0, -1j, decimal.Decimal(-1), fractions.Fraction(-2, 2)):
self.assertRaises(TypeError, os.chown, os_helper.TESTFN, value, gid)
self.assertRaises(TypeError, os.chown, os_helper.TESTFN, uid, value)
self.assertIsNone(os.chown(os_helper.TESTFN, uid, gid))
self.assertIsNone(os.chown(os_helper.TESTFN, -1, -1))
@unittest.skipUnless(hasattr(os, 'getgroups'), 'need os.getgroups')
def test_chown_gid(self):
groups = os.getgroups()
if len(groups) < 2:
self.skipTest("test needs at least 2 groups")
gid_1, gid_2 = groups[:2]
uid = os.stat(os_helper.TESTFN).st_uid
os.chown(os_helper.TESTFN, uid, gid_1)
gid = os.stat(os_helper.TESTFN).st_gid
self.assertEqual(gid, gid_1)
os.chown(os_helper.TESTFN, uid, gid_2)
gid = os.stat(os_helper.TESTFN).st_gid
self.assertEqual(gid, gid_2)
@unittest.skipUnless(root_in_posix and len(all_users) > 1,
"test needs root privilege and more than one user")
def test_chown_with_root(self):
uid_1, uid_2 = all_users[:2]
gid = os.stat(os_helper.TESTFN).st_gid
os.chown(os_helper.TESTFN, uid_1, gid)
uid = os.stat(os_helper.TESTFN).st_uid
self.assertEqual(uid, uid_1)
os.chown(os_helper.TESTFN, uid_2, gid)
uid = os.stat(os_helper.TESTFN).st_uid
self.assertEqual(uid, uid_2)
@unittest.skipUnless(not root_in_posix and len(all_users) > 1,
"test needs non-root account and more than one user")
def test_chown_without_permission(self):
uid_1, uid_2 = all_users[:2]
gid = os.stat(os_helper.TESTFN).st_gid
with self.assertRaises(PermissionError):
os.chown(os_helper.TESTFN, uid_1, gid)
os.chown(os_helper.TESTFN, uid_2, gid)
@classmethod
def tearDownClass(cls):
os.rmdir(os_helper.TESTFN)
class RemoveDirsTests(unittest.TestCase):
def setUp(self):
os.makedirs(os_helper.TESTFN)
def tearDown(self):
os_helper.rmtree(os_helper.TESTFN)
def test_remove_all(self):
dira = os.path.join(os_helper.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
os.removedirs(dirb)
self.assertFalse(os.path.exists(dirb))
self.assertFalse(os.path.exists(dira))
self.assertFalse(os.path.exists(os_helper.TESTFN))
def test_remove_partial(self):
dira = os.path.join(os_helper.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
create_file(os.path.join(dira, 'file.txt'))
os.removedirs(dirb)
self.assertFalse(os.path.exists(dirb))
self.assertTrue(os.path.exists(dira))
self.assertTrue(os.path.exists(os_helper.TESTFN))
def test_remove_nothing(self):
dira = os.path.join(os_helper.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
create_file(os.path.join(dirb, 'file.txt'))
with self.assertRaises(OSError):
os.removedirs(dirb)
self.assertTrue(os.path.exists(dirb))
self.assertTrue(os.path.exists(dira))
self.assertTrue(os.path.exists(os_helper.TESTFN))
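# Illustrative sketch (not part of the test suite): os.removedirs() removes
# the leaf directory, then keeps removing each parent that has become empty,
# stopping silently at the first non-empty ancestor; only a failure to remove
# the leaf itself raises, as test_remove_nothing() above shows. ``base`` is a
# hypothetical empty directory.
def _example_removedirs(base):
    os.makedirs(os.path.join(base, 'a', 'b'))
    # Removes b, then a, then base itself, as long as each is empty.
    os.removedirs(os.path.join(base, 'a', 'b'))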
class DevNullTests(unittest.TestCase):
def test_devnull(self):
with open(os.devnull, 'wb', 0) as f:
f.write(b'hello')
f.close()
with open(os.devnull, 'rb') as f:
self.assertEqual(f.read(), b'')
class URandomTests(unittest.TestCase):
def test_urandom_length(self):
self.assertEqual(len(os.urandom(0)), 0)
self.assertEqual(len(os.urandom(1)), 1)
self.assertEqual(len(os.urandom(10)), 10)
self.assertEqual(len(os.urandom(100)), 100)
self.assertEqual(len(os.urandom(1000)), 1000)
def test_urandom_value(self):
data1 = os.urandom(16)
self.assertIsInstance(data1, bytes)
data2 = os.urandom(16)
self.assertNotEqual(data1, data2)
def get_urandom_subprocess(self, count):
code = '\n'.join((
'import os, sys',
'data = os.urandom(%s)' % count,
'sys.stdout.buffer.write(data)',
'sys.stdout.buffer.flush()'))
out = assert_python_ok('-c', code)
stdout = out[1]
self.assertEqual(len(stdout), count)
return stdout
def test_urandom_subprocess(self):
data1 = self.get_urandom_subprocess(16)
data2 = self.get_urandom_subprocess(16)
self.assertNotEqual(data1, data2)
@unittest.skipUnless(hasattr(os, 'getrandom'), 'need os.getrandom()')
class GetRandomTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
try:
os.getrandom(1)
except OSError as exc:
if exc.errno == errno.ENOSYS:
# Python compiled on a more recent Linux version
# than the current Linux kernel
raise unittest.SkipTest("getrandom() syscall fails with ENOSYS")
else:
raise
def test_getrandom_type(self):
data = os.getrandom(16)
self.assertIsInstance(data, bytes)
self.assertEqual(len(data), 16)
def test_getrandom0(self):
empty = os.getrandom(0)
self.assertEqual(empty, b'')
def test_getrandom_random(self):
self.assertTrue(hasattr(os, 'GRND_RANDOM'))
# Don't test os.getrandom(1, os.GRND_RANDOM), so as not to consume the
# scarce entropy of /dev/random
def test_getrandom_nonblock(self):
# The call must not fail. Also check that the flag exists.
try:
os.getrandom(1, os.GRND_NONBLOCK)
except BlockingIOError:
# System urandom is not initialized yet
pass
def test_getrandom_value(self):
data1 = os.getrandom(16)
data2 = os.getrandom(16)
self.assertNotEqual(data1, data2)
# os.urandom() doesn't use a file descriptor when it is implemented with the
# getentropy() function, the getrandom() function or the getrandom() syscall
OS_URANDOM_DONT_USE_FD = (
sysconfig.get_config_var('HAVE_GETENTROPY') == 1
or sysconfig.get_config_var('HAVE_GETRANDOM') == 1
or sysconfig.get_config_var('HAVE_GETRANDOM_SYSCALL') == 1)
@unittest.skipIf(OS_URANDOM_DONT_USE_FD,
"os.urandom() does not use a file descriptor")
@unittest.skipIf(sys.platform == "vxworks",
"VxWorks can't set RLIMIT_NOFILE to 1")
class URandomFDTests(unittest.TestCase):
@unittest.skipUnless(resource, "test requires the resource module")
def test_urandom_failure(self):
# Check urandom() failing when it is not able to open /dev/random.
# We spawn a new process to make the test more robust (if getrlimit()
# failed to restore the file descriptor limit after this, the whole
# test suite would crash; this actually happened on the OS X Tiger
# buildbot).
code = """if 1:
import errno
import os
import resource
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit))
try:
os.urandom(16)
except OSError as e:
assert e.errno == errno.EMFILE, e.errno
else:
raise AssertionError("OSError not raised")
"""
assert_python_ok('-c', code)
def test_urandom_fd_closed(self):
# Issue #21207: urandom() should reopen its fd to /dev/urandom if
# closed.
code = """if 1:
import os
import sys
import test.support
os.urandom(4)
with test.support.SuppressCrashReport():
os.closerange(3, 256)
sys.stdout.buffer.write(os.urandom(4))
"""
rc, out, err = assert_python_ok('-Sc', code)
def test_urandom_fd_reopened(self):
# Issue #21207: urandom() should detect its fd to /dev/urandom
# changed to something else, and reopen it.
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
create_file(os_helper.TESTFN, b"x" * 256)
code = """if 1:
import os
import sys
import test.support
os.urandom(4)
with test.support.SuppressCrashReport():
for fd in range(3, 256):
try:
os.close(fd)
except OSError:
pass
else:
# Found the urandom fd (XXX hopefully)
break
os.closerange(3, 256)
with open({TESTFN!r}, 'rb') as f:
new_fd = f.fileno()
# Issue #26935: posix allows new_fd and fd to be equal but
# some libc implementations have dup2 return an error in this
# case.
if new_fd != fd:
os.dup2(new_fd, fd)
sys.stdout.buffer.write(os.urandom(4))
sys.stdout.buffer.write(os.urandom(4))
""".format(TESTFN=os_helper.TESTFN)
rc, out, err = assert_python_ok('-Sc', code)
self.assertEqual(len(out), 8)
self.assertNotEqual(out[0:4], out[4:8])
rc, out2, err2 = assert_python_ok('-Sc', code)
self.assertEqual(len(out2), 8)
self.assertNotEqual(out2, out)
@contextlib.contextmanager
def _execvpe_mockup(defpath=None):
"""
Stubs out execv and execve functions when used as context manager.
Records exec calls. The mock execv and execve functions always raise an
exception as they would normally never return.
"""
# A list of tuples containing (function name, first arg, args)
# of calls to execv or execve that have been made.
calls = []
def mock_execv(name, *args):
calls.append(('execv', name, args))
raise RuntimeError("execv called")
def mock_execve(name, *args):
calls.append(('execve', name, args))
raise OSError(errno.ENOTDIR, "execve called")
try:
orig_execv = os.execv
orig_execve = os.execve
orig_defpath = os.defpath
os.execv = mock_execv
os.execve = mock_execve
if defpath is not None:
os.defpath = defpath
yield calls
finally:
os.execv = orig_execv
os.execve = orig_execve
os.defpath = orig_defpath
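# Illustrative sketch (not part of the test suite): how _execvpe_mockup()
# above is meant to be used. The program path is hypothetical.
def _example_execvpe_mockup():
    with _execvpe_mockup() as calls:
        try:
            os.execv('/hypothetical/prog', ['prog'])
        except RuntimeError:
            pass  # the mock always raises instead of replacing the process
    return calls  # -> [('execv', '/hypothetical/prog', (['prog'],))]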
@unittest.skipUnless(hasattr(os, 'execv'),
"need os.execv()")
class ExecTests(unittest.TestCase):
@unittest.skipIf(USING_LINUXTHREADS,
"avoid triggering a linuxthreads bug: see issue #4970")
def test_execvpe_with_bad_program(self):
self.assertRaises(OSError, os.execvpe, 'no such app-',
['no such app-'], None)
def test_execv_with_bad_arglist(self):
self.assertRaises(ValueError, os.execv, 'notepad', ())
self.assertRaises(ValueError, os.execv, 'notepad', [])
self.assertRaises(ValueError, os.execv, 'notepad', ('',))
self.assertRaises(ValueError, os.execv, 'notepad', [''])
def test_execvpe_with_bad_arglist(self):
self.assertRaises(ValueError, os.execvpe, 'notepad', [], None)
self.assertRaises(ValueError, os.execvpe, 'notepad', [], {})
self.assertRaises(ValueError, os.execvpe, 'notepad', [''], {})
@unittest.skipUnless(hasattr(os, '_execvpe'),
"No internal os._execvpe function to test.")
def _test_internal_execvpe(self, test_type):
program_path = os.sep + 'absolutepath'
if test_type is bytes:
program = b'executable'
fullpath = os.path.join(os.fsencode(program_path), program)
native_fullpath = fullpath
arguments = [b'progname', 'arg1', 'arg2']
else:
program = 'executable'
arguments = ['progname', 'arg1', 'arg2']
fullpath = os.path.join(program_path, program)
if os.name != "nt":
native_fullpath = os.fsencode(fullpath)
else:
native_fullpath = fullpath
env = {'spam': 'beans'}
# test os._execvpe() with an absolute path
with _execvpe_mockup() as calls:
self.assertRaises(RuntimeError,
os._execvpe, fullpath, arguments)
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], ('execv', fullpath, (arguments,)))
# test os._execvpe() with a relative path:
# os.get_exec_path() returns defpath
with _execvpe_mockup(defpath=program_path) as calls:
self.assertRaises(OSError,
os._execvpe, program, arguments, env=env)
self.assertEqual(len(calls), 1)
self.assertSequenceEqual(calls[0],
('execve', native_fullpath, (arguments, env)))
# test os._execvpe() with a relative path:
# os.get_exec_path() reads the 'PATH' variable
with _execvpe_mockup() as calls:
env_path = env.copy()
if test_type is bytes:
env_path[b'PATH'] = program_path
else:
env_path['PATH'] = program_path
self.assertRaises(OSError,
os._execvpe, program, arguments, env=env_path)
self.assertEqual(len(calls), 1)
self.assertSequenceEqual(calls[0],
('execve', native_fullpath, (arguments, env_path)))
def test_internal_execvpe_str(self):
self._test_internal_execvpe(str)
if os.name != "nt":
self._test_internal_execvpe(bytes)
def test_execve_invalid_env(self):
args = [sys.executable, '-c', 'pass']
# null character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT\0VEGETABLE"] = "cabbage"
with self.assertRaises(ValueError):
os.execve(args[0], args, newenv)
# null character in the environment variable value
newenv = os.environ.copy()
newenv["FRUIT"] = "orange\0VEGETABLE=cabbage"
with self.assertRaises(ValueError):
os.execve(args[0], args, newenv)
# equal character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT=ORANGE"] = "lemon"
with self.assertRaises(ValueError):
os.execve(args[0], args, newenv)
@unittest.skipUnless(sys.platform == "win32", "Win32-specific test")
def test_execve_with_empty_path(self):
# bpo-32890: Check GetLastError() misuse
try:
os.execve('', ['arg'], {})
except OSError as e:
self.assertTrue(e.winerror is None or e.winerror != 0)
else:
self.fail('No OSError raised')
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32ErrorTests(unittest.TestCase):
def setUp(self):
try:
os.stat(os_helper.TESTFN)
except FileNotFoundError:
exists = False
except OSError as exc:
exists = True
self.fail("file %s must not exist; os.stat failed with %s"
% (os_helper.TESTFN, exc))
else:
self.fail("file %s must not exist" % os_helper.TESTFN)
def test_rename(self):
self.assertRaises(OSError, os.rename, os_helper.TESTFN, os_helper.TESTFN+".bak")
def test_remove(self):
self.assertRaises(OSError, os.remove, os_helper.TESTFN)
def test_chdir(self):
self.assertRaises(OSError, os.chdir, os_helper.TESTFN)
def test_mkdir(self):
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with open(os_helper.TESTFN, "x") as f:
self.assertRaises(OSError, os.mkdir, os_helper.TESTFN)
def test_utime(self):
self.assertRaises(OSError, os.utime, os_helper.TESTFN, None)
def test_chmod(self):
self.assertRaises(OSError, os.chmod, os_helper.TESTFN, 0)
class TestInvalidFD(unittest.TestCase):
singles = ["fchdir", "dup", "fdopen", "fdatasync", "fstat",
"fstatvfs", "fsync", "tcgetpgrp", "ttyname"]
# singles.append("close")
# We omit close because it doesn't raise an exception on some platforms.
def get_single(f):
def helper(self):
if hasattr(os, f):
self.check(getattr(os, f))
return helper
for f in singles:
locals()["test_"+f] = get_single(f)
def check(self, f, *args):
try:
f(os_helper.make_bad_fd(), *args)
except OSError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("%r didn't raise an OSError with a bad file descriptor"
% f)
@unittest.skipUnless(hasattr(os, 'isatty'), 'test needs os.isatty()')
def test_isatty(self):
self.assertEqual(os.isatty(os_helper.make_bad_fd()), False)
@unittest.skipUnless(hasattr(os, 'closerange'), 'test needs os.closerange()')
def test_closerange(self):
fd = os_helper.make_bad_fd()
# Make sure none of the descriptors we are about to close are
# currently valid (issue 6542).
for i in range(10):
try: os.fstat(fd+i)
except OSError:
pass
else:
break
if i < 2:
raise unittest.SkipTest(
"Unable to acquire a range of invalid file descriptors")
self.assertEqual(os.closerange(fd, fd + i-1), None)
@unittest.skipUnless(hasattr(os, 'dup2'), 'test needs os.dup2()')
def test_dup2(self):
self.check(os.dup2, 20)
@unittest.skipUnless(hasattr(os, 'fchmod'), 'test needs os.fchmod()')
def test_fchmod(self):
self.check(os.fchmod, 0)
@unittest.skipUnless(hasattr(os, 'fchown'), 'test needs os.fchown()')
def test_fchown(self):
self.check(os.fchown, -1, -1)
@unittest.skipUnless(hasattr(os, 'fpathconf'), 'test needs os.fpathconf()')
def test_fpathconf(self):
self.check(os.pathconf, "PC_NAME_MAX")
self.check(os.fpathconf, "PC_NAME_MAX")
@unittest.skipUnless(hasattr(os, 'ftruncate'), 'test needs os.ftruncate()')
def test_ftruncate(self):
self.check(os.truncate, 0)
self.check(os.ftruncate, 0)
@unittest.skipUnless(hasattr(os, 'lseek'), 'test needs os.lseek()')
def test_lseek(self):
self.check(os.lseek, 0, 0)
@unittest.skipUnless(hasattr(os, 'read'), 'test needs os.read()')
def test_read(self):
self.check(os.read, 1)
@unittest.skipUnless(hasattr(os, 'readv'), 'test needs os.readv()')
def test_readv(self):
buf = bytearray(10)
self.check(os.readv, [buf])
@unittest.skipUnless(hasattr(os, 'tcsetpgrp'), 'test needs os.tcsetpgrp()')
def test_tcsetpgrpt(self):
self.check(os.tcsetpgrp, 0)
@unittest.skipUnless(hasattr(os, 'write'), 'test needs os.write()')
def test_write(self):
self.check(os.write, b" ")
@unittest.skipUnless(hasattr(os, 'writev'), 'test needs os.writev()')
def test_writev(self):
self.check(os.writev, [b'abc'])
def test_inheritable(self):
self.check(os.get_inheritable)
self.check(os.set_inheritable, True)
@unittest.skipUnless(hasattr(os, 'get_blocking'),
'needs os.get_blocking() and os.set_blocking()')
def test_blocking(self):
self.check(os.get_blocking)
self.check(os.set_blocking, True)
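# Illustrative sketch (not part of the test suite): one way to obtain a file
# descriptor that is known to be stale, similar in spirit to what
# os_helper.make_bad_fd() provides for the checks above. ``path`` is a
# hypothetical readable file.
def _example_bad_fd(path):
    fd = os.open(path, os.O_RDONLY)
    os.close(fd)
    # Now stale: os.fstat(fd) raises OSError (errno.EBADF), as long as the
    # descriptor has not been reused in the meantime.
    return fd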
class LinkTests(unittest.TestCase):
def setUp(self):
self.file1 = os_helper.TESTFN
self.file2 = os.path.join(os_helper.TESTFN + "2")
def tearDown(self):
for file in (self.file1, self.file2):
if os.path.exists(file):
os.unlink(file)
def _test_link(self, file1, file2):
create_file(file1)
try:
os.link(file1, file2)
except PermissionError as e:
self.skipTest('os.link(): %s' % e)
with open(file1, "r") as f1, open(file2, "r") as f2:
self.assertTrue(os.path.sameopenfile(f1.fileno(), f2.fileno()))
def test_link(self):
self._test_link(self.file1, self.file2)
def test_link_bytes(self):
self._test_link(bytes(self.file1, sys.getfilesystemencoding()),
bytes(self.file2, sys.getfilesystemencoding()))
def test_unicode_name(self):
try:
os.fsencode("\xf1")
except UnicodeError:
raise unittest.SkipTest("Unable to encode for this platform.")
self.file1 += "\xf1"
self.file2 = self.file1 + "2"
self._test_link(self.file1, self.file2)
@unittest.skipIf(sys.platform == "win32", "Posix specific tests")
class PosixUidGidTests(unittest.TestCase):
# uid_t and gid_t are 32-bit unsigned integers on Linux
UID_OVERFLOW = (1 << 32)
GID_OVERFLOW = (1 << 32)
@unittest.skipUnless(hasattr(os, 'setuid'), 'test needs os.setuid()')
def test_setuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setuid, 0)
self.assertRaises(TypeError, os.setuid, 'not an int')
self.assertRaises(OverflowError, os.setuid, self.UID_OVERFLOW)
@unittest.skipUnless(hasattr(os, 'setgid'), 'test needs os.setgid()')
def test_setgid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setgid, 0)
self.assertRaises(TypeError, os.setgid, 'not an int')
self.assertRaises(OverflowError, os.setgid, self.GID_OVERFLOW)
@unittest.skipUnless(hasattr(os, 'seteuid'), 'test needs os.seteuid()')
def test_seteuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.seteuid, 0)
self.assertRaises(TypeError, os.setegid, 'not an int')
self.assertRaises(OverflowError, os.seteuid, self.UID_OVERFLOW)
@unittest.skipUnless(hasattr(os, 'setegid'), 'test needs os.setegid()')
def test_setegid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setegid, 0)
self.assertRaises(TypeError, os.setegid, 'not an int')
self.assertRaises(OverflowError, os.setegid, self.GID_OVERFLOW)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setreuid, 0, 0)
self.assertRaises(TypeError, os.setreuid, 'not an int', 0)
self.assertRaises(TypeError, os.setreuid, 0, 'not an int')
self.assertRaises(OverflowError, os.setreuid, self.UID_OVERFLOW, 0)
self.assertRaises(OverflowError, os.setreuid, 0, self.UID_OVERFLOW)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setreuid(-1,-1);sys.exit(0)'])
@unittest.skipUnless(hasattr(os, 'setregid'), 'test needs os.setregid()')
def test_setregid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setregid, 0, 0)
self.assertRaises(TypeError, os.setregid, 'not an int', 0)
self.assertRaises(TypeError, os.setregid, 0, 'not an int')
self.assertRaises(OverflowError, os.setregid, self.GID_OVERFLOW, 0)
self.assertRaises(OverflowError, os.setregid, 0, self.GID_OVERFLOW)
@unittest.skipUnless(hasattr(os, 'setregid'), 'test needs os.setregid()')
def test_setregid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setregid(-1,-1);sys.exit(0)'])
@unittest.skipIf(sys.platform == "win32", "Posix specific tests")
class Pep383Tests(unittest.TestCase):
def setUp(self):
if os_helper.TESTFN_UNENCODABLE:
self.dir = os_helper.TESTFN_UNENCODABLE
elif os_helper.TESTFN_NONASCII:
self.dir = os_helper.TESTFN_NONASCII
else:
self.dir = os_helper.TESTFN
self.bdir = os.fsencode(self.dir)
bytesfn = []
def add_filename(fn):
try:
fn = os.fsencode(fn)
except UnicodeEncodeError:
return
bytesfn.append(fn)
add_filename(os_helper.TESTFN_UNICODE)
if os_helper.TESTFN_UNENCODABLE:
add_filename(os_helper.TESTFN_UNENCODABLE)
if os_helper.TESTFN_NONASCII:
add_filename(os_helper.TESTFN_NONASCII)
if not bytesfn:
self.skipTest("couldn't create any non-ascii filename")
self.unicodefn = set()
os.mkdir(self.dir)
try:
for fn in bytesfn:
os_helper.create_empty_file(os.path.join(self.bdir, fn))
fn = os.fsdecode(fn)
if fn in self.unicodefn:
raise ValueError("duplicate filename")
self.unicodefn.add(fn)
except:
shutil.rmtree(self.dir)
raise
def tearDown(self):
shutil.rmtree(self.dir)
def test_listdir(self):
expected = self.unicodefn
found = set(os.listdir(self.dir))
self.assertEqual(found, expected)
# test listdir without arguments
current_directory = os.getcwd()
try:
os.chdir(os.sep)
self.assertEqual(set(os.listdir()), set(os.listdir(os.sep)))
finally:
os.chdir(current_directory)
def test_open(self):
for fn in self.unicodefn:
f = open(os.path.join(self.dir, fn), 'rb')
f.close()
@unittest.skipUnless(hasattr(os, 'statvfs'),
"need os.statvfs()")
def test_statvfs(self):
# issue #9645
for fn in self.unicodefn:
# should not fail with file not found error
fullname = os.path.join(self.dir, fn)
os.statvfs(fullname)
def test_stat(self):
for fn in self.unicodefn:
os.stat(os.path.join(self.dir, fn))
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32KillTests(unittest.TestCase):
def _kill(self, sig):
# Start sys.executable as a subprocess and communicate from the
# subprocess to the parent that the interpreter is ready. When it
# becomes ready, send *sig* via os.kill to the subprocess and check
# that the return code is equal to *sig*.
import ctypes
from ctypes import wintypes
import msvcrt
# Since we can't access the contents of the process' stdout until the
# process has exited, use PeekNamedPipe to see what's inside stdout
# without waiting. This is done so we can tell that the interpreter
# is started and running at a point where it could handle a signal.
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
PeekNamedPipe.restype = wintypes.BOOL
PeekNamedPipe.argtypes = (wintypes.HANDLE, # Pipe handle
ctypes.POINTER(ctypes.c_char), # stdout buf
wintypes.DWORD, # Buffer size
ctypes.POINTER(wintypes.DWORD), # bytes read
ctypes.POINTER(wintypes.DWORD), # bytes avail
ctypes.POINTER(wintypes.DWORD)) # bytes left
msg = "running"
proc = subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.stdout.write('{}');"
"sys.stdout.flush();"
"input()".format(msg)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
self.addCleanup(proc.stdout.close)
self.addCleanup(proc.stderr.close)
self.addCleanup(proc.stdin.close)
count, max = 0, 100
while count < max and proc.poll() is None:
# Create a string buffer to store the result of stdout from the pipe
buf = ctypes.create_string_buffer(len(msg))
# Obtain the text currently in proc.stdout
# Bytes read/avail/left are left as NULL and unused
rslt = PeekNamedPipe(msvcrt.get_osfhandle(proc.stdout.fileno()),
buf, ctypes.sizeof(buf), None, None, None)
self.assertNotEqual(rslt, 0, "PeekNamedPipe failed")
if buf.value:
self.assertEqual(msg, buf.value.decode())
break
time.sleep(0.1)
count += 1
else:
self.fail("Did not receive communication from the subprocess")
os.kill(proc.pid, sig)
self.assertEqual(proc.wait(), sig)
def test_kill_sigterm(self):
# SIGTERM doesn't mean anything special, but make sure it works
self._kill(signal.SIGTERM)
def test_kill_int(self):
# os.kill on Windows can take an int which gets set as the exit code
self._kill(100)
def _kill_with_event(self, event, name):
tagname = "test_os_%s" % uuid.uuid1()
m = mmap.mmap(-1, 1, tagname)
m[0] = 0
# Run a script which has console control handling enabled.
proc = subprocess.Popen([sys.executable,
os.path.join(os.path.dirname(__file__),
"win_console_handler.py"), tagname],
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
# Let the interpreter startup before we send signals. See #3137.
count, max = 0, 100
while count < max and proc.poll() is None:
if m[0] == 1:
break
time.sleep(0.1)
count += 1
else:
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("Subprocess didn't finish initialization")
os.kill(proc.pid, event)
# proc.send_signal(event) could also be done here.
# Allow time for the signal to be passed and the process to exit.
time.sleep(0.5)
if not proc.poll():
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("subprocess did not stop on {}".format(name))
@unittest.skip("subprocesses aren't inheriting Ctrl+C property")
def test_CTRL_C_EVENT(self):
from ctypes import wintypes
import ctypes
# Make a NULL value by creating a pointer with no argument.
NULL = ctypes.POINTER(ctypes.c_int)()
SetConsoleCtrlHandler = ctypes.windll.kernel32.SetConsoleCtrlHandler
SetConsoleCtrlHandler.argtypes = (ctypes.POINTER(ctypes.c_int),
wintypes.BOOL)
SetConsoleCtrlHandler.restype = wintypes.BOOL
# Calling this with NULL and FALSE causes the calling process to
# handle Ctrl+C, rather than ignore it. This property is inherited
# by subprocesses.
SetConsoleCtrlHandler(NULL, 0)
self._kill_with_event(signal.CTRL_C_EVENT, "CTRL_C_EVENT")
def test_CTRL_BREAK_EVENT(self):
self._kill_with_event(signal.CTRL_BREAK_EVENT, "CTRL_BREAK_EVENT")
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32ListdirTests(unittest.TestCase):
"""Test listdir on Windows."""
def setUp(self):
self.created_paths = []
for i in range(2):
dir_name = 'SUB%d' % i
dir_path = os.path.join(os_helper.TESTFN, dir_name)
file_name = 'FILE%d' % i
file_path = os.path.join(os_helper.TESTFN, file_name)
os.makedirs(dir_path)
with open(file_path, 'w', encoding='utf-8') as f:
f.write("I'm %s and proud of it. Blame test_os.\n" % file_path)
self.created_paths.extend([dir_name, file_name])
self.created_paths.sort()
def tearDown(self):
shutil.rmtree(os_helper.TESTFN)
def test_listdir_no_extended_path(self):
"""Test when the path is not an "extended" path."""
# unicode
self.assertEqual(
sorted(os.listdir(os_helper.TESTFN)),
self.created_paths)
# bytes
self.assertEqual(
sorted(os.listdir(os.fsencode(os_helper.TESTFN))),
[os.fsencode(path) for path in self.created_paths])
def test_listdir_extended_path(self):
"""Test when the path starts with '\\\\?\\'."""
# See: http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx#maxpath
# unicode
path = '\\\\?\\' + os.path.abspath(os_helper.TESTFN)
self.assertEqual(
sorted(os.listdir(path)),
self.created_paths)
# bytes
path = b'\\\\?\\' + os.fsencode(os.path.abspath(os_helper.TESTFN))
self.assertEqual(
sorted(os.listdir(path)),
[os.fsencode(path) for path in self.created_paths])
@unittest.skipUnless(hasattr(os, 'readlink'), 'needs os.readlink()')
class ReadlinkTests(unittest.TestCase):
filelink = 'readlinktest'
filelink_target = os.path.abspath(__file__)
filelinkb = os.fsencode(filelink)
filelinkb_target = os.fsencode(filelink_target)
def assertPathEqual(self, left, right):
left = os.path.normcase(left)
right = os.path.normcase(right)
if sys.platform == 'win32':
# Bad practice to blindly strip the prefix as it may be required to
# correctly refer to the file, but we're only comparing paths here.
has_prefix = lambda p: p.startswith(
b'\\\\?\\' if isinstance(p, bytes) else '\\\\?\\')
if has_prefix(left):
left = left[4:]
if has_prefix(right):
right = right[4:]
self.assertEqual(left, right)
def setUp(self):
self.assertTrue(os.path.exists(self.filelink_target))
self.assertTrue(os.path.exists(self.filelinkb_target))
self.assertFalse(os.path.exists(self.filelink))
self.assertFalse(os.path.exists(self.filelinkb))
def test_not_symlink(self):
filelink_target = FakePath(self.filelink_target)
self.assertRaises(OSError, os.readlink, self.filelink_target)
self.assertRaises(OSError, os.readlink, filelink_target)
def test_missing_link(self):
self.assertRaises(FileNotFoundError, os.readlink, 'missing-link')
self.assertRaises(FileNotFoundError, os.readlink,
FakePath('missing-link'))
@os_helper.skip_unless_symlink
def test_pathlike(self):
os.symlink(self.filelink_target, self.filelink)
self.addCleanup(os_helper.unlink, self.filelink)
filelink = FakePath(self.filelink)
self.assertPathEqual(os.readlink(filelink), self.filelink_target)
@os_helper.skip_unless_symlink
def test_pathlike_bytes(self):
os.symlink(self.filelinkb_target, self.filelinkb)
self.addCleanup(os_helper.unlink, self.filelinkb)
path = os.readlink(FakePath(self.filelinkb))
self.assertPathEqual(path, self.filelinkb_target)
self.assertIsInstance(path, bytes)
@os_helper.skip_unless_symlink
def test_bytes(self):
os.symlink(self.filelinkb_target, self.filelinkb)
self.addCleanup(os_helper.unlink, self.filelinkb)
path = os.readlink(self.filelinkb)
self.assertPathEqual(path, self.filelinkb_target)
self.assertIsInstance(path, bytes)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@os_helper.skip_unless_symlink
class Win32SymlinkTests(unittest.TestCase):
filelink = 'filelinktest'
filelink_target = os.path.abspath(__file__)
dirlink = 'dirlinktest'
dirlink_target = os.path.dirname(filelink_target)
missing_link = 'missing link'
def setUp(self):
assert os.path.exists(self.dirlink_target)
assert os.path.exists(self.filelink_target)
assert not os.path.exists(self.dirlink)
assert not os.path.exists(self.filelink)
assert not os.path.exists(self.missing_link)
def tearDown(self):
if os.path.exists(self.filelink):
os.remove(self.filelink)
if os.path.exists(self.dirlink):
os.rmdir(self.dirlink)
if os.path.lexists(self.missing_link):
os.remove(self.missing_link)
def test_directory_link(self):
os.symlink(self.dirlink_target, self.dirlink)
self.assertTrue(os.path.exists(self.dirlink))
self.assertTrue(os.path.isdir(self.dirlink))
self.assertTrue(os.path.islink(self.dirlink))
self.check_stat(self.dirlink, self.dirlink_target)
def test_file_link(self):
os.symlink(self.filelink_target, self.filelink)
self.assertTrue(os.path.exists(self.filelink))
self.assertTrue(os.path.isfile(self.filelink))
self.assertTrue(os.path.islink(self.filelink))
self.check_stat(self.filelink, self.filelink_target)
def _create_missing_dir_link(self):
'Create a "directory" link to a non-existent target'
linkname = self.missing_link
if os.path.lexists(linkname):
os.remove(linkname)
target = r'c:\\target does not exist.29r3c740'
assert not os.path.exists(target)
target_is_dir = True
os.symlink(target, linkname, target_is_dir)
def test_remove_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# For compatibility with Unix, os.remove will check the
# directory status and call RemoveDirectory if the symlink
# was created with target_is_dir==True.
os.remove(self.missing_link)
def test_isdir_on_directory_link_to_missing_target(self):
self._create_missing_dir_link()
self.assertFalse(os.path.isdir(self.missing_link))
def test_rmdir_on_directory_link_to_missing_target(self):
self._create_missing_dir_link()
os.rmdir(self.missing_link)
def check_stat(self, link, target):
self.assertEqual(os.stat(link), os.stat(target))
self.assertNotEqual(os.lstat(link), os.stat(link))
bytes_link = os.fsencode(link)
self.assertEqual(os.stat(bytes_link), os.stat(target))
self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link))
def test_12084(self):
level1 = os.path.abspath(os_helper.TESTFN)
level2 = os.path.join(level1, "level2")
level3 = os.path.join(level2, "level3")
self.addCleanup(os_helper.rmtree, level1)
os.mkdir(level1)
os.mkdir(level2)
os.mkdir(level3)
file1 = os.path.abspath(os.path.join(level1, "file1"))
create_file(file1)
orig_dir = os.getcwd()
try:
os.chdir(level2)
link = os.path.join(level2, "link")
os.symlink(os.path.relpath(file1), "link")
self.assertIn("link", os.listdir(os.getcwd()))
# Check os.stat calls from the same dir as the link
self.assertEqual(os.stat(file1), os.stat("link"))
# Check os.stat calls from a dir below the link
os.chdir(level1)
self.assertEqual(os.stat(file1),
os.stat(os.path.relpath(link)))
# Check os.stat calls from a dir above the link
os.chdir(level3)
self.assertEqual(os.stat(file1),
os.stat(os.path.relpath(link)))
finally:
os.chdir(orig_dir)
@unittest.skipUnless(os.path.lexists(r'C:\Users\All Users')
and os.path.exists(r'C:\ProgramData'),
'Test directories not found')
def test_29248(self):
# os.symlink() calls CreateSymbolicLink, which creates
# the reparse data buffer with the print name stored
# first, so the offset is always 0. CreateSymbolicLink
# stores the "PrintName" DOS path (e.g. "C:\") first,
# with an offset of 0, followed by the "SubstituteName"
# NT path (e.g. "\??\C:\"). The "All Users" link, on
# the other hand, seems to have been created manually
# with an inverted order.
target = os.readlink(r'C:\Users\All Users')
self.assertTrue(os.path.samefile(target, r'C:\ProgramData'))
def test_buffer_overflow(self):
# Older versions would have a buffer overflow when detecting
# whether a link source was a directory. This test ensures we
# no longer crash, but does not otherwise validate the behavior.
segment = 'X' * 27
path = os.path.join(*[segment] * 10)
test_cases = [
# overflow with absolute src
('\\' + path, segment),
# overflow dest with relative src
(segment, path),
# overflow when joining src
(path[:180], path[:180]),
]
for src, dest in test_cases:
try:
os.symlink(src, dest)
except FileNotFoundError:
pass
else:
try:
os.remove(dest)
except OSError:
pass
# Also test with bytes, since that is a separate code path.
try:
os.symlink(os.fsencode(src), os.fsencode(dest))
except FileNotFoundError:
pass
else:
try:
os.remove(dest)
except OSError:
pass
def test_appexeclink(self):
root = os.path.expandvars(r'%LOCALAPPDATA%\Microsoft\WindowsApps')
if not os.path.isdir(root):
self.skipTest("test requires a WindowsApps directory")
aliases = [os.path.join(root, a)
for a in fnmatch.filter(os.listdir(root), '*.exe')]
for alias in aliases:
if support.verbose:
print()
print("Testing with", alias)
st = os.lstat(alias)
self.assertEqual(st, os.stat(alias))
self.assertFalse(stat.S_ISLNK(st.st_mode))
self.assertEqual(st.st_reparse_tag, stat.IO_REPARSE_TAG_APPEXECLINK)
# testing the first one we see is sufficient
break
else:
self.skipTest("test requires an app execution alias")
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32JunctionTests(unittest.TestCase):
junction = 'junctiontest'
junction_target = os.path.dirname(os.path.abspath(__file__))
def setUp(self):
assert os.path.exists(self.junction_target)
assert not os.path.lexists(self.junction)
def tearDown(self):
if os.path.lexists(self.junction):
os.unlink(self.junction)
def test_create_junction(self):
_winapi.CreateJunction(self.junction_target, self.junction)
self.assertTrue(os.path.lexists(self.junction))
self.assertTrue(os.path.exists(self.junction))
self.assertTrue(os.path.isdir(self.junction))
self.assertNotEqual(os.stat(self.junction), os.lstat(self.junction))
self.assertEqual(os.stat(self.junction), os.stat(self.junction_target))
# bpo-37834: Junctions are not recognized as links.
self.assertFalse(os.path.islink(self.junction))
self.assertEqual(os.path.normcase("\\\\?\\" + self.junction_target),
os.path.normcase(os.readlink(self.junction)))
def test_unlink_removes_junction(self):
_winapi.CreateJunction(self.junction_target, self.junction)
self.assertTrue(os.path.exists(self.junction))
self.assertTrue(os.path.lexists(self.junction))
os.unlink(self.junction)
self.assertFalse(os.path.exists(self.junction))
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32NtTests(unittest.TestCase):
def test_getfinalpathname_handles(self):
nt = support.import_module('nt')
ctypes = support.import_module('ctypes')
import ctypes.wintypes
kernel = ctypes.WinDLL('Kernel32.dll', use_last_error=True)
kernel.GetCurrentProcess.restype = ctypes.wintypes.HANDLE
kernel.GetProcessHandleCount.restype = ctypes.wintypes.BOOL
kernel.GetProcessHandleCount.argtypes = (ctypes.wintypes.HANDLE,
ctypes.wintypes.LPDWORD)
# This is a pseudo-handle that doesn't need to be closed
hproc = kernel.GetCurrentProcess()
handle_count = ctypes.wintypes.DWORD()
ok = kernel.GetProcessHandleCount(hproc, ctypes.byref(handle_count))
self.assertEqual(1, ok)
before_count = handle_count.value
# The first two test the error path, __file__ tests the success path
filenames = [
r'\\?\C:',
r'\\?\NUL',
r'\\?\CONIN',
__file__,
]
for _ in range(10):
for name in filenames:
try:
nt._getfinalpathname(name)
except Exception:
# Failure is expected
pass
try:
os.stat(name)
except Exception:
pass
ok = kernel.GetProcessHandleCount(hproc, ctypes.byref(handle_count))
self.assertEqual(1, ok)
handle_delta = handle_count.value - before_count
self.assertEqual(0, handle_delta)
@os_helper.skip_unless_symlink
class NonLocalSymlinkTests(unittest.TestCase):
def setUp(self):
r"""
Create this structure:
base
\___ some_dir
"""
os.makedirs('base/some_dir')
def tearDown(self):
shutil.rmtree('base')
def test_directory_link_nonlocal(self):
"""
The symlink target should resolve relative to the link, not relative
to the current directory.
Here we link base/some_link -> base/some_dir and ensure that some_link
is resolved as a directory.
In issue13772, it was discovered that directory detection failed if
the symlink target was not specified relative to the current
directory, which was a defect in the implementation.
"""
src = os.path.join('base', 'some_link')
os.symlink('some_dir', src)
assert os.path.isdir(src)
class FSEncodingTests(unittest.TestCase):
def test_nop(self):
self.assertEqual(os.fsencode(b'abc\xff'), b'abc\xff')
self.assertEqual(os.fsdecode('abc\u0141'), 'abc\u0141')
def test_identity(self):
# assert fsdecode(fsencode(x)) == x
for fn in ('unicode\u0141', 'latin\xe9', 'ascii'):
try:
bytesfn = os.fsencode(fn)
except UnicodeEncodeError:
continue
self.assertEqual(os.fsdecode(bytesfn), fn)
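# Illustrative note (not part of the test suite): the round-trips above hold
# because os.fsencode()/os.fsdecode() use the filesystem encoding; on POSIX
# the 'surrogateescape' error handler lets arbitrary bytes survive, e.g.
# os.fsencode(os.fsdecode(b'abc\xff')) == b'abc\xff' even when b'\xff' is
# not valid in the filesystem encoding.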
class DeviceEncodingTests(unittest.TestCase):
def test_bad_fd(self):
# Return None when an fd doesn't actually exist.
self.assertIsNone(os.device_encoding(123456))
@unittest.skipUnless(os.isatty(0) and not win32_is_iot() and (sys.platform.startswith('win') or
(hasattr(locale, 'nl_langinfo') and hasattr(locale, 'CODESET'))),
'test requires a tty and either Windows or nl_langinfo(CODESET)')
def test_device_encoding(self):
encoding = os.device_encoding(0)
self.assertIsNotNone(encoding)
self.assertTrue(codecs.lookup(encoding))
class PidTests(unittest.TestCase):
@unittest.skipUnless(hasattr(os, 'getppid'), "test needs os.getppid")
def test_getppid(self):
p = subprocess.Popen([sys.executable, '-c',
'import os; print(os.getppid())'],
stdout=subprocess.PIPE)
stdout, _ = p.communicate()
# We are the parent of our subprocess
self.assertEqual(int(stdout), os.getpid())
def check_waitpid(self, code, exitcode, callback=None):
if sys.platform == 'win32':
# On Windows, os.spawnv() simply joins arguments with spaces:
# arguments need to be quoted
args = [f'"{sys.executable}"', '-c', f'"{code}"']
else:
args = [sys.executable, '-c', code]
pid = os.spawnv(os.P_NOWAIT, sys.executable, args)
if callback is not None:
callback(pid)
# don't use support.wait_process() to test directly os.waitpid()
# and os.waitstatus_to_exitcode()
pid2, status = os.waitpid(pid, 0)
self.assertEqual(os.waitstatus_to_exitcode(status), exitcode)
self.assertEqual(pid2, pid)
def test_waitpid(self):
self.check_waitpid(code='pass', exitcode=0)
def test_waitstatus_to_exitcode(self):
exitcode = 23
code = f'import sys; sys.exit({exitcode})'
self.check_waitpid(code, exitcode=exitcode)
with self.assertRaises(TypeError):
os.waitstatus_to_exitcode(0.0)
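# Illustrative note (not part of the test suite): on POSIX a wait status
# encodes a normal exit as (exitcode << 8) and death by a signal as the
# signal number in the low bits; os.waitstatus_to_exitcode() decodes both,
# returning the exit code or -signum respectively, which the Windows and
# SIGKILL tests below also exercise.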
@unittest.skipUnless(sys.platform == 'win32', 'win32-specific test')
def test_waitpid_windows(self):
# bpo-40138: test os.waitpid() and os.waitstatus_to_exitcode()
# with exit code larger than INT_MAX.
STATUS_CONTROL_C_EXIT = 0xC000013A
code = f'import _winapi; _winapi.ExitProcess({STATUS_CONTROL_C_EXIT})'
self.check_waitpid(code, exitcode=STATUS_CONTROL_C_EXIT)
@unittest.skipUnless(sys.platform == 'win32', 'win32-specific test')
def test_waitstatus_to_exitcode_windows(self):
max_exitcode = 2 ** 32 - 1
for exitcode in (0, 1, 5, max_exitcode):
self.assertEqual(os.waitstatus_to_exitcode(exitcode << 8),
exitcode)
# invalid values
with self.assertRaises(ValueError):
os.waitstatus_to_exitcode((max_exitcode + 1) << 8)
with self.assertRaises(OverflowError):
os.waitstatus_to_exitcode(-1)
# Skip the test on Windows
@unittest.skipUnless(hasattr(signal, 'SIGKILL'), 'need signal.SIGKILL')
def test_waitstatus_to_exitcode_kill(self):
code = f'import time; time.sleep({support.LONG_TIMEOUT})'
signum = signal.SIGKILL
def kill_process(pid):
os.kill(pid, signum)
self.check_waitpid(code, exitcode=-signum, callback=kill_process)
class SpawnTests(unittest.TestCase):
def create_args(self, *, with_env=False, use_bytes=False):
self.exitcode = 17
filename = os_helper.TESTFN
self.addCleanup(os_helper.unlink, filename)
if not with_env:
code = 'import sys; sys.exit(%s)' % self.exitcode
else:
self.env = dict(os.environ)
# create a unique key
self.key = str(uuid.uuid4())
self.env[self.key] = self.key
# read the variable from os.environ to check that it exists
code = ('import sys, os; magic = os.environ[%r]; sys.exit(%s)'
% (self.key, self.exitcode))
with open(filename, "w") as fp:
fp.write(code)
args = [sys.executable, filename]
if use_bytes:
args = [os.fsencode(a) for a in args]
self.env = {os.fsencode(k): os.fsencode(v)
for k, v in self.env.items()}
return args
@requires_os_func('spawnl')
def test_spawnl(self):
args = self.create_args()
exitcode = os.spawnl(os.P_WAIT, args[0], *args)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnle')
def test_spawnle(self):
args = self.create_args(with_env=True)
exitcode = os.spawnle(os.P_WAIT, args[0], *args, self.env)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnlp')
def test_spawnlp(self):
args = self.create_args()
exitcode = os.spawnlp(os.P_WAIT, args[0], *args)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnlpe')
def test_spawnlpe(self):
args = self.create_args(with_env=True)
exitcode = os.spawnlpe(os.P_WAIT, args[0], *args, self.env)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnv')
def test_spawnv(self):
args = self.create_args()
exitcode = os.spawnv(os.P_WAIT, args[0], args)
self.assertEqual(exitcode, self.exitcode)
# Test for PyUnicode_FSConverter()
exitcode = os.spawnv(os.P_WAIT, FakePath(args[0]), args)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnve')
def test_spawnve(self):
args = self.create_args(with_env=True)
exitcode = os.spawnve(os.P_WAIT, args[0], args, self.env)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnvp')
def test_spawnvp(self):
args = self.create_args()
exitcode = os.spawnvp(os.P_WAIT, args[0], args)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnvpe')
def test_spawnvpe(self):
args = self.create_args(with_env=True)
exitcode = os.spawnvpe(os.P_WAIT, args[0], args, self.env)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnv')
def test_nowait(self):
args = self.create_args()
pid = os.spawnv(os.P_NOWAIT, args[0], args)
support.wait_process(pid, exitcode=self.exitcode)
@requires_os_func('spawnve')
def test_spawnve_bytes(self):
# Test bytes handling in parse_arglist and parse_envlist (#28114)
args = self.create_args(with_env=True, use_bytes=True)
exitcode = os.spawnve(os.P_WAIT, args[0], args, self.env)
self.assertEqual(exitcode, self.exitcode)
@requires_os_func('spawnl')
def test_spawnl_noargs(self):
args = self.create_args()
self.assertRaises(ValueError, os.spawnl, os.P_NOWAIT, args[0])
self.assertRaises(ValueError, os.spawnl, os.P_NOWAIT, args[0], '')
@requires_os_func('spawnle')
def test_spawnle_noargs(self):
args = self.create_args()
self.assertRaises(ValueError, os.spawnle, os.P_NOWAIT, args[0], {})
self.assertRaises(ValueError, os.spawnle, os.P_NOWAIT, args[0], '', {})
@requires_os_func('spawnv')
def test_spawnv_noargs(self):
args = self.create_args()
self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], ())
self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], [])
self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], ('',))
self.assertRaises(ValueError, os.spawnv, os.P_NOWAIT, args[0], [''])
@requires_os_func('spawnve')
def test_spawnve_noargs(self):
args = self.create_args()
self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], (), {})
self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], [], {})
self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], ('',), {})
self.assertRaises(ValueError, os.spawnve, os.P_NOWAIT, args[0], [''], {})
def _test_invalid_env(self, spawn):
args = [sys.executable, '-c', 'pass']
# null character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT\0VEGETABLE"] = "cabbage"
try:
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
except ValueError:
pass
else:
self.assertEqual(exitcode, 127)
# null character in the environment variable value
newenv = os.environ.copy()
newenv["FRUIT"] = "orange\0VEGETABLE=cabbage"
try:
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
except ValueError:
pass
else:
self.assertEqual(exitcode, 127)
# equal character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT=ORANGE"] = "lemon"
try:
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
except ValueError:
pass
else:
self.assertEqual(exitcode, 127)
# equal character in the environment variable value
filename = os_helper.TESTFN
self.addCleanup(os_helper.unlink, filename)
with open(filename, "w") as fp:
fp.write('import sys, os\n'
'if os.getenv("FRUIT") != "orange=lemon":\n'
' raise AssertionError')
args = [sys.executable, filename]
newenv = os.environ.copy()
newenv["FRUIT"] = "orange=lemon"
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
self.assertEqual(exitcode, 0)
@requires_os_func('spawnve')
def test_spawnve_invalid_env(self):
self._test_invalid_env(os.spawnve)
@requires_os_func('spawnvpe')
def test_spawnvpe_invalid_env(self):
self._test_invalid_env(os.spawnvpe)
# The introduction of this TestCase caused at least two different errors on
# *nix buildbots. Temporarily skip this to let the buildbots move along.
@unittest.skip("Skip due to platform/environment differences on *NIX buildbots")
@unittest.skipUnless(hasattr(os, 'getlogin'), "test needs os.getlogin")
class LoginTests(unittest.TestCase):
def test_getlogin(self):
user_name = os.getlogin()
self.assertNotEqual(len(user_name), 0)
@unittest.skipUnless(hasattr(os, 'getpriority') and hasattr(os, 'setpriority'),
"needs os.getpriority and os.setpriority")
class ProgramPriorityTests(unittest.TestCase):
"""Tests for os.getpriority() and os.setpriority()."""
def test_set_get_priority(self):
base = os.getpriority(os.PRIO_PROCESS, os.getpid())
os.setpriority(os.PRIO_PROCESS, os.getpid(), base + 1)
try:
new_prio = os.getpriority(os.PRIO_PROCESS, os.getpid())
if base >= 19 and new_prio <= 19:
raise unittest.SkipTest("unable to reliably test setpriority "
"at current nice level of %s" % base)
else:
self.assertEqual(new_prio, base + 1)
finally:
try:
os.setpriority(os.PRIO_PROCESS, os.getpid(), base)
except OSError as err:
if err.errno != errno.EACCES:
raise
class SendfileTestServer(asyncore.dispatcher, threading.Thread):
class Handler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.in_buffer = []
self.accumulate = True
self.closed = False
self.push(b"220 ready\r\n")
def handle_read(self):
data = self.recv(4096)
if self.accumulate:
self.in_buffer.append(data)
def get_data(self):
return b''.join(self.in_buffer)
def handle_close(self):
self.close()
self.closed = True
def handle_error(self):
raise
def __init__(self, address):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
self._active = False
self._active_lock = threading.Lock()
# --- public API
@property
def running(self):
return self._active
def start(self):
assert not self.running
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def stop(self):
assert self.running
self._active = False
self.join()
def wait(self):
# wait for handler connection to be closed, then stop the server
while not getattr(self.handler_instance, "closed", False):
time.sleep(0.001)
self.stop()
# --- internals
def run(self):
self._active = True
self.__flag.set()
while self._active and asyncore.socket_map:
self._active_lock.acquire()
asyncore.loop(timeout=0.001, count=1)
self._active_lock.release()
asyncore.close_all()
def handle_accept(self):
conn, addr = self.accept()
self.handler_instance = self.Handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
@unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
class TestSendfile(unittest.TestCase):
DATA = b"12345abcde" * 16 * 1024 # 160 KiB
SUPPORT_HEADERS_TRAILERS = not sys.platform.startswith("linux") and \
not sys.platform.startswith("solaris") and \
not sys.platform.startswith("sunos")
requires_headers_trailers = unittest.skipUnless(SUPPORT_HEADERS_TRAILERS,
'requires headers and trailers support')
requires_32b = unittest.skipUnless(sys.maxsize < 2**32,
'test is only meaningful on 32-bit builds')
@classmethod
def setUpClass(cls):
cls.key = threading_helper.threading_setup()
create_file(os_helper.TESTFN, cls.DATA)
@classmethod
def tearDownClass(cls):
threading_helper.threading_cleanup(*cls.key)
os_helper.unlink(os_helper.TESTFN)
def setUp(self):
self.server = SendfileTestServer((socket_helper.HOST, 0))
self.server.start()
self.client = socket.socket()
self.client.connect((self.server.host, self.server.port))
self.client.settimeout(1)
# synchronize by waiting for "220 ready" response
self.client.recv(1024)
self.sockno = self.client.fileno()
self.file = open(os_helper.TESTFN, 'rb')
self.fileno = self.file.fileno()
def tearDown(self):
self.file.close()
self.client.close()
if self.server.running:
self.server.stop()
self.server = None
def sendfile_wrapper(self, *args, **kwargs):
"""A higher level wrapper representing how an application is
supposed to use sendfile().
"""
while True:
try:
return os.sendfile(*args, **kwargs)
except OSError as err:
if err.errno == errno.ECONNRESET:
# disconnected
raise
elif err.errno in (errno.EAGAIN, errno.EBUSY):
                    # we have to retry sending the data
continue
else:
raise
def test_send_whole_file(self):
# normal send
total_sent = 0
offset = 0
nbytes = 4096
while total_sent < len(self.DATA):
sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
if sent == 0:
break
offset += sent
total_sent += sent
self.assertTrue(sent <= nbytes)
self.assertEqual(offset, total_sent)
self.assertEqual(total_sent, len(self.DATA))
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(len(data), len(self.DATA))
self.assertEqual(data, self.DATA)
def test_send_at_certain_offset(self):
# start sending a file at a certain offset
total_sent = 0
offset = len(self.DATA) // 2
must_send = len(self.DATA) - offset
nbytes = 4096
while total_sent < must_send:
sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
if sent == 0:
break
offset += sent
total_sent += sent
self.assertTrue(sent <= nbytes)
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
expected = self.DATA[len(self.DATA) // 2:]
self.assertEqual(total_sent, len(expected))
self.assertEqual(len(data), len(expected))
self.assertEqual(data, expected)
def test_offset_overflow(self):
# specify an offset > file size
offset = len(self.DATA) + 4096
try:
sent = os.sendfile(self.sockno, self.fileno, offset, 4096)
except OSError as e:
# Solaris can raise EINVAL if offset >= file length, ignore.
if e.errno != errno.EINVAL:
raise
else:
self.assertEqual(sent, 0)
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(data, b'')
def test_invalid_offset(self):
with self.assertRaises(OSError) as cm:
os.sendfile(self.sockno, self.fileno, -1, 4096)
self.assertEqual(cm.exception.errno, errno.EINVAL)
def test_keywords(self):
# Keyword arguments should be supported
os.sendfile(out_fd=self.sockno, in_fd=self.fileno,
offset=0, count=4096)
if self.SUPPORT_HEADERS_TRAILERS:
os.sendfile(out_fd=self.sockno, in_fd=self.fileno,
offset=0, count=4096,
headers=(), trailers=(), flags=0)
# --- headers / trailers tests
@requires_headers_trailers
def test_headers(self):
total_sent = 0
expected_data = b"x" * 512 + b"y" * 256 + self.DATA[:-1]
sent = os.sendfile(self.sockno, self.fileno, 0, 4096,
headers=[b"x" * 512, b"y" * 256])
self.assertLessEqual(sent, 512 + 256 + 4096)
total_sent += sent
offset = 4096
while total_sent < len(expected_data):
nbytes = min(len(expected_data) - total_sent, 4096)
sent = self.sendfile_wrapper(self.sockno, self.fileno,
offset, nbytes)
if sent == 0:
break
self.assertLessEqual(sent, nbytes)
total_sent += sent
offset += sent
self.assertEqual(total_sent, len(expected_data))
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(hash(data), hash(expected_data))
@requires_headers_trailers
def test_trailers(self):
TESTFN2 = os_helper.TESTFN + "2"
file_data = b"abcdef"
self.addCleanup(os_helper.unlink, TESTFN2)
create_file(TESTFN2, file_data)
with open(TESTFN2, 'rb') as f:
os.sendfile(self.sockno, f.fileno(), 0, 5,
trailers=[b"123456", b"789"])
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(data, b"abcde123456789")
@requires_headers_trailers
@requires_32b
def test_headers_overflow_32bits(self):
self.server.handler_instance.accumulate = False
with self.assertRaises(OSError) as cm:
os.sendfile(self.sockno, self.fileno, 0, 0,
headers=[b"x" * 2**16] * 2**15)
self.assertEqual(cm.exception.errno, errno.EINVAL)
@requires_headers_trailers
@requires_32b
def test_trailers_overflow_32bits(self):
self.server.handler_instance.accumulate = False
with self.assertRaises(OSError) as cm:
os.sendfile(self.sockno, self.fileno, 0, 0,
trailers=[b"x" * 2**16] * 2**15)
self.assertEqual(cm.exception.errno, errno.EINVAL)
@requires_headers_trailers
@unittest.skipUnless(hasattr(os, 'SF_NODISKIO'),
'test needs os.SF_NODISKIO')
def test_flags(self):
try:
os.sendfile(self.sockno, self.fileno, 0, 4096,
flags=os.SF_NODISKIO)
except OSError as err:
if err.errno not in (errno.EBUSY, errno.EAGAIN):
raise
def supports_extended_attributes():
if not hasattr(os, "setxattr"):
return False
try:
with open(os_helper.TESTFN, "xb", 0) as fp:
try:
os.setxattr(fp.fileno(), b"user.test", b"")
except OSError:
return False
finally:
os_helper.unlink(os_helper.TESTFN)
return True
@unittest.skipUnless(supports_extended_attributes(),
"no non-broken extended attribute support")
# Kernels < 2.6.39 don't respect setxattr flags.
@support.requires_linux_version(2, 6, 39)
class ExtendedAttributeTests(unittest.TestCase):
def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwargs):
fn = os_helper.TESTFN
self.addCleanup(os_helper.unlink, fn)
create_file(fn)
with self.assertRaises(OSError) as cm:
getxattr(fn, s("user.test"), **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
init_xattr = listxattr(fn)
self.assertIsInstance(init_xattr, list)
setxattr(fn, s("user.test"), b"", **kwargs)
xattr = set(init_xattr)
xattr.add("user.test")
self.assertEqual(set(listxattr(fn)), xattr)
self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"")
setxattr(fn, s("user.test"), b"hello", os.XATTR_REPLACE, **kwargs)
self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"hello")
with self.assertRaises(OSError) as cm:
setxattr(fn, s("user.test"), b"bye", os.XATTR_CREATE, **kwargs)
self.assertEqual(cm.exception.errno, errno.EEXIST)
with self.assertRaises(OSError) as cm:
setxattr(fn, s("user.test2"), b"bye", os.XATTR_REPLACE, **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
setxattr(fn, s("user.test2"), b"foo", os.XATTR_CREATE, **kwargs)
xattr.add("user.test2")
self.assertEqual(set(listxattr(fn)), xattr)
removexattr(fn, s("user.test"), **kwargs)
with self.assertRaises(OSError) as cm:
getxattr(fn, s("user.test"), **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
xattr.remove("user.test")
self.assertEqual(set(listxattr(fn)), xattr)
self.assertEqual(getxattr(fn, s("user.test2"), **kwargs), b"foo")
setxattr(fn, s("user.test"), b"a"*1024, **kwargs)
self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*1024)
removexattr(fn, s("user.test"), **kwargs)
many = sorted("user.test{}".format(i) for i in range(100))
for thing in many:
setxattr(fn, thing, b"x", **kwargs)
self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many))
def _check_xattrs(self, *args, **kwargs):
self._check_xattrs_str(str, *args, **kwargs)
os_helper.unlink(os_helper.TESTFN)
self._check_xattrs_str(os.fsencode, *args, **kwargs)
os_helper.unlink(os_helper.TESTFN)
def test_simple(self):
self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
os.listxattr)
def test_lpath(self):
self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
os.listxattr, follow_symlinks=False)
def test_fds(self):
def getxattr(path, *args):
with open(path, "rb") as fp:
return os.getxattr(fp.fileno(), *args)
def setxattr(path, *args):
with open(path, "wb", 0) as fp:
os.setxattr(fp.fileno(), *args)
def removexattr(path, *args):
with open(path, "wb", 0) as fp:
os.removexattr(fp.fileno(), *args)
def listxattr(path, *args):
with open(path, "rb") as fp:
return os.listxattr(fp.fileno(), *args)
self._check_xattrs(getxattr, setxattr, removexattr, listxattr)
@unittest.skipUnless(hasattr(os, 'get_terminal_size'), "requires os.get_terminal_size")
class TermsizeTests(unittest.TestCase):
def test_does_not_crash(self):
"""Check if get_terminal_size() returns a meaningful value.
There's no easy portable way to actually check the size of the
terminal, so let's check if it returns something sensible instead.
"""
try:
size = os.get_terminal_size()
except OSError as e:
if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
# Under win32 a generic OSError can be thrown if the
# handle cannot be retrieved
self.skipTest("failed to query terminal size")
raise
self.assertGreaterEqual(size.columns, 0)
self.assertGreaterEqual(size.lines, 0)
def test_stty_match(self):
"""Check if stty returns the same results
stty actually tests stdin, so get_terminal_size is invoked on
stdin explicitly. If stty succeeded, then get_terminal_size()
should work too.
"""
try:
size = (
subprocess.check_output(
["stty", "size"], stderr=subprocess.DEVNULL, text=True
).split()
)
except (FileNotFoundError, subprocess.CalledProcessError,
PermissionError):
self.skipTest("stty invocation failed")
expected = (int(size[1]), int(size[0])) # reversed order
try:
actual = os.get_terminal_size(sys.__stdin__.fileno())
except OSError as e:
if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
# Under win32 a generic OSError can be thrown if the
# handle cannot be retrieved
self.skipTest("failed to query terminal size")
raise
self.assertEqual(expected, actual)
@unittest.skipUnless(hasattr(os, 'memfd_create'), 'requires os.memfd_create')
@support.requires_linux_version(3, 17)
class MemfdCreateTests(unittest.TestCase):
def test_memfd_create(self):
fd = os.memfd_create("Hi", os.MFD_CLOEXEC)
self.assertNotEqual(fd, -1)
self.addCleanup(os.close, fd)
self.assertFalse(os.get_inheritable(fd))
with open(fd, "wb", closefd=False) as f:
f.write(b'memfd_create')
self.assertEqual(f.tell(), 12)
fd2 = os.memfd_create("Hi")
self.addCleanup(os.close, fd2)
self.assertFalse(os.get_inheritable(fd2))
class OSErrorTests(unittest.TestCase):
def setUp(self):
class Str(str):
pass
self.bytes_filenames = []
self.unicode_filenames = []
if os_helper.TESTFN_UNENCODABLE is not None:
decoded = os_helper.TESTFN_UNENCODABLE
else:
decoded = os_helper.TESTFN
self.unicode_filenames.append(decoded)
self.unicode_filenames.append(Str(decoded))
if os_helper.TESTFN_UNDECODABLE is not None:
encoded = os_helper.TESTFN_UNDECODABLE
else:
encoded = os.fsencode(os_helper.TESTFN)
self.bytes_filenames.append(encoded)
self.bytes_filenames.append(bytearray(encoded))
self.bytes_filenames.append(memoryview(encoded))
self.filenames = self.bytes_filenames + self.unicode_filenames
def test_oserror_filename(self):
funcs = [
(self.filenames, os.chdir,),
(self.filenames, os.chmod, 0o777),
(self.filenames, os.lstat,),
(self.filenames, os.open, os.O_RDONLY),
(self.filenames, os.rmdir,),
(self.filenames, os.stat,),
(self.filenames, os.unlink,),
]
if sys.platform == "win32":
funcs.extend((
(self.bytes_filenames, os.rename, b"dst"),
(self.bytes_filenames, os.replace, b"dst"),
(self.unicode_filenames, os.rename, "dst"),
(self.unicode_filenames, os.replace, "dst"),
(self.unicode_filenames, os.listdir, ),
))
else:
funcs.extend((
(self.filenames, os.listdir,),
(self.filenames, os.rename, "dst"),
(self.filenames, os.replace, "dst"),
))
if hasattr(os, "chown"):
funcs.append((self.filenames, os.chown, 0, 0))
if hasattr(os, "lchown"):
funcs.append((self.filenames, os.lchown, 0, 0))
if hasattr(os, "truncate"):
funcs.append((self.filenames, os.truncate, 0))
if hasattr(os, "chflags"):
funcs.append((self.filenames, os.chflags, 0))
if hasattr(os, "lchflags"):
funcs.append((self.filenames, os.lchflags, 0))
if hasattr(os, "chroot"):
funcs.append((self.filenames, os.chroot,))
if hasattr(os, "link"):
if sys.platform == "win32":
funcs.append((self.bytes_filenames, os.link, b"dst"))
funcs.append((self.unicode_filenames, os.link, "dst"))
else:
funcs.append((self.filenames, os.link, "dst"))
if hasattr(os, "listxattr"):
funcs.extend((
(self.filenames, os.listxattr,),
(self.filenames, os.getxattr, "user.test"),
(self.filenames, os.setxattr, "user.test", b'user'),
(self.filenames, os.removexattr, "user.test"),
))
if hasattr(os, "lchmod"):
funcs.append((self.filenames, os.lchmod, 0o777))
if hasattr(os, "readlink"):
funcs.append((self.filenames, os.readlink,))
for filenames, func, *func_args in funcs:
for name in filenames:
try:
if isinstance(name, (str, bytes)):
func(name, *func_args)
else:
with self.assertWarnsRegex(DeprecationWarning, 'should be'):
func(name, *func_args)
except OSError as err:
self.assertIs(err.filename, name, str(func))
except UnicodeDecodeError:
pass
else:
self.fail("No exception thrown by {}".format(func))
class CPUCountTests(unittest.TestCase):
def test_cpu_count(self):
cpus = os.cpu_count()
if cpus is not None:
self.assertIsInstance(cpus, int)
self.assertGreater(cpus, 0)
else:
self.skipTest("Could not determine the number of CPUs")
class FDInheritanceTests(unittest.TestCase):
def test_get_set_inheritable(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
os.set_inheritable(fd, True)
self.assertEqual(os.get_inheritable(fd), True)
@unittest.skipIf(fcntl is None, "need fcntl")
def test_get_inheritable_cloexec(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
# clear FD_CLOEXEC flag
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags &= ~fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
self.assertEqual(os.get_inheritable(fd), True)
@unittest.skipIf(fcntl is None, "need fcntl")
def test_set_inheritable_cloexec(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC,
fcntl.FD_CLOEXEC)
os.set_inheritable(fd, True)
self.assertEqual(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC,
0)
def test_open(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
@unittest.skipUnless(hasattr(os, 'pipe'), "need os.pipe()")
def test_pipe(self):
rfd, wfd = os.pipe()
self.addCleanup(os.close, rfd)
self.addCleanup(os.close, wfd)
self.assertEqual(os.get_inheritable(rfd), False)
self.assertEqual(os.get_inheritable(wfd), False)
def test_dup(self):
fd1 = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd1)
fd2 = os.dup(fd1)
self.addCleanup(os.close, fd2)
self.assertEqual(os.get_inheritable(fd2), False)
def test_dup_standard_stream(self):
fd = os.dup(1)
self.addCleanup(os.close, fd)
self.assertGreater(fd, 0)
@unittest.skipUnless(sys.platform == 'win32', 'win32-specific test')
def test_dup_nul(self):
# os.dup() was creating inheritable fds for character files.
fd1 = os.open('NUL', os.O_RDONLY)
self.addCleanup(os.close, fd1)
fd2 = os.dup(fd1)
self.addCleanup(os.close, fd2)
self.assertFalse(os.get_inheritable(fd2))
@unittest.skipUnless(hasattr(os, 'dup2'), "need os.dup2()")
def test_dup2(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
# inheritable by default
fd2 = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd2)
self.assertEqual(os.dup2(fd, fd2), fd2)
self.assertTrue(os.get_inheritable(fd2))
# force non-inheritable
fd3 = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd3)
self.assertEqual(os.dup2(fd, fd3, inheritable=False), fd3)
self.assertFalse(os.get_inheritable(fd3))
@unittest.skipUnless(hasattr(os, 'openpty'), "need os.openpty()")
def test_openpty(self):
master_fd, slave_fd = os.openpty()
self.addCleanup(os.close, master_fd)
self.addCleanup(os.close, slave_fd)
self.assertEqual(os.get_inheritable(master_fd), False)
self.assertEqual(os.get_inheritable(slave_fd), False)
class PathTConverterTests(unittest.TestCase):
# tuples of (function name, allows fd arguments, additional arguments to
# function, cleanup function)
functions = [
('stat', True, (), None),
('lstat', False, (), None),
('access', False, (os.F_OK,), None),
('chflags', False, (0,), None),
('lchflags', False, (0,), None),
('open', False, (0,), getattr(os, 'close', None)),
]
def test_path_t_converter(self):
str_filename = os_helper.TESTFN
if os.name == 'nt':
bytes_fspath = bytes_filename = None
else:
bytes_filename = os.fsencode(os_helper.TESTFN)
bytes_fspath = FakePath(bytes_filename)
fd = os.open(FakePath(str_filename), os.O_WRONLY|os.O_CREAT)
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
self.addCleanup(os.close, fd)
int_fspath = FakePath(fd)
str_fspath = FakePath(str_filename)
for name, allow_fd, extra_args, cleanup_fn in self.functions:
with self.subTest(name=name):
try:
fn = getattr(os, name)
except AttributeError:
continue
for path in (str_filename, bytes_filename, str_fspath,
bytes_fspath):
if path is None:
continue
with self.subTest(name=name, path=path):
result = fn(path, *extra_args)
if cleanup_fn is not None:
cleanup_fn(result)
with self.assertRaisesRegex(
TypeError, 'to return str or bytes'):
fn(int_fspath, *extra_args)
if allow_fd:
result = fn(fd, *extra_args) # should not fail
if cleanup_fn is not None:
cleanup_fn(result)
else:
with self.assertRaisesRegex(
TypeError,
'os.PathLike'):
fn(fd, *extra_args)
def test_path_t_converter_and_custom_class(self):
msg = r'__fspath__\(\) to return str or bytes, not %s'
with self.assertRaisesRegex(TypeError, msg % r'int'):
os.stat(FakePath(2))
with self.assertRaisesRegex(TypeError, msg % r'float'):
os.stat(FakePath(2.34))
with self.assertRaisesRegex(TypeError, msg % r'object'):
os.stat(FakePath(object()))
@unittest.skipUnless(hasattr(os, 'get_blocking'),
'needs os.get_blocking() and os.set_blocking()')
class BlockingTests(unittest.TestCase):
def test_blocking(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_blocking(fd), True)
os.set_blocking(fd, False)
self.assertEqual(os.get_blocking(fd), False)
os.set_blocking(fd, True)
self.assertEqual(os.get_blocking(fd), True)
class ExportsTests(unittest.TestCase):
def test_os_all(self):
self.assertIn('open', os.__all__)
self.assertIn('walk', os.__all__)
class TestDirEntry(unittest.TestCase):
def setUp(self):
self.path = os.path.realpath(os_helper.TESTFN)
self.addCleanup(os_helper.rmtree, self.path)
os.mkdir(self.path)
def test_uninstantiable(self):
self.assertRaises(TypeError, os.DirEntry)
def test_unpickable(self):
filename = create_file(os.path.join(self.path, "file.txt"), b'python')
entry = [entry for entry in os.scandir(self.path)].pop()
self.assertIsInstance(entry, os.DirEntry)
self.assertEqual(entry.name, "file.txt")
import pickle
self.assertRaises(TypeError, pickle.dumps, entry, filename)
class TestScandir(unittest.TestCase):
check_no_resource_warning = warnings_helper.check_no_resource_warning
def setUp(self):
self.path = os.path.realpath(os_helper.TESTFN)
self.bytes_path = os.fsencode(self.path)
self.addCleanup(os_helper.rmtree, self.path)
os.mkdir(self.path)
def create_file(self, name="file.txt"):
path = self.bytes_path if isinstance(name, bytes) else self.path
filename = os.path.join(path, name)
create_file(filename, b'python')
return filename
def get_entries(self, names):
entries = dict((entry.name, entry)
for entry in os.scandir(self.path))
self.assertEqual(sorted(entries.keys()), names)
return entries
def assert_stat_equal(self, stat1, stat2, skip_fields):
if skip_fields:
for attr in dir(stat1):
if not attr.startswith("st_"):
continue
if attr in ("st_dev", "st_ino", "st_nlink"):
continue
self.assertEqual(getattr(stat1, attr),
getattr(stat2, attr),
(stat1, stat2, attr))
else:
self.assertEqual(stat1, stat2)
def test_uninstantiable(self):
scandir_iter = os.scandir(self.path)
self.assertRaises(TypeError, type(scandir_iter))
scandir_iter.close()
def test_unpickable(self):
filename = self.create_file("file.txt")
scandir_iter = os.scandir(self.path)
import pickle
self.assertRaises(TypeError, pickle.dumps, scandir_iter, filename)
scandir_iter.close()
def check_entry(self, entry, name, is_dir, is_file, is_symlink):
self.assertIsInstance(entry, os.DirEntry)
self.assertEqual(entry.name, name)
self.assertEqual(entry.path, os.path.join(self.path, name))
self.assertEqual(entry.inode(),
os.stat(entry.path, follow_symlinks=False).st_ino)
entry_stat = os.stat(entry.path)
self.assertEqual(entry.is_dir(),
stat.S_ISDIR(entry_stat.st_mode))
self.assertEqual(entry.is_file(),
stat.S_ISREG(entry_stat.st_mode))
self.assertEqual(entry.is_symlink(),
os.path.islink(entry.path))
entry_lstat = os.stat(entry.path, follow_symlinks=False)
self.assertEqual(entry.is_dir(follow_symlinks=False),
stat.S_ISDIR(entry_lstat.st_mode))
self.assertEqual(entry.is_file(follow_symlinks=False),
stat.S_ISREG(entry_lstat.st_mode))
self.assert_stat_equal(entry.stat(),
entry_stat,
os.name == 'nt' and not is_symlink)
self.assert_stat_equal(entry.stat(follow_symlinks=False),
entry_lstat,
os.name == 'nt')
def test_attributes(self):
link = hasattr(os, 'link')
symlink = os_helper.can_symlink()
dirname = os.path.join(self.path, "dir")
os.mkdir(dirname)
filename = self.create_file("file.txt")
if link:
try:
os.link(filename, os.path.join(self.path, "link_file.txt"))
except PermissionError as e:
self.skipTest('os.link(): %s' % e)
if symlink:
os.symlink(dirname, os.path.join(self.path, "symlink_dir"),
target_is_directory=True)
os.symlink(filename, os.path.join(self.path, "symlink_file.txt"))
names = ['dir', 'file.txt']
if link:
names.append('link_file.txt')
if symlink:
names.extend(('symlink_dir', 'symlink_file.txt'))
entries = self.get_entries(names)
entry = entries['dir']
self.check_entry(entry, 'dir', True, False, False)
entry = entries['file.txt']
self.check_entry(entry, 'file.txt', False, True, False)
if link:
entry = entries['link_file.txt']
self.check_entry(entry, 'link_file.txt', False, True, False)
if symlink:
entry = entries['symlink_dir']
self.check_entry(entry, 'symlink_dir', True, False, True)
entry = entries['symlink_file.txt']
self.check_entry(entry, 'symlink_file.txt', False, True, True)
def get_entry(self, name):
path = self.bytes_path if isinstance(name, bytes) else self.path
entries = list(os.scandir(path))
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertEqual(entry.name, name)
return entry
def create_file_entry(self, name='file.txt'):
filename = self.create_file(name=name)
return self.get_entry(os.path.basename(filename))
def test_current_directory(self):
filename = self.create_file()
old_dir = os.getcwd()
try:
os.chdir(self.path)
            # call scandir() without arguments: it must list the contents
            # of the current directory
entries = dict((entry.name, entry) for entry in os.scandir())
self.assertEqual(sorted(entries.keys()),
[os.path.basename(filename)])
finally:
os.chdir(old_dir)
def test_repr(self):
entry = self.create_file_entry()
self.assertEqual(repr(entry), "<DirEntry 'file.txt'>")
def test_fspath_protocol(self):
entry = self.create_file_entry()
self.assertEqual(os.fspath(entry), os.path.join(self.path, 'file.txt'))
def test_fspath_protocol_bytes(self):
bytes_filename = os.fsencode('bytesfile.txt')
bytes_entry = self.create_file_entry(name=bytes_filename)
fspath = os.fspath(bytes_entry)
self.assertIsInstance(fspath, bytes)
self.assertEqual(fspath,
os.path.join(os.fsencode(self.path),bytes_filename))
def test_removed_dir(self):
path = os.path.join(self.path, 'dir')
os.mkdir(path)
entry = self.get_entry('dir')
os.rmdir(path)
        # On POSIX, the is_dir() result depends on whether scandir() filled d_type or not
if os.name == 'nt':
self.assertTrue(entry.is_dir())
self.assertFalse(entry.is_file())
self.assertFalse(entry.is_symlink())
if os.name == 'nt':
self.assertRaises(FileNotFoundError, entry.inode)
# don't fail
entry.stat()
entry.stat(follow_symlinks=False)
else:
self.assertGreater(entry.inode(), 0)
self.assertRaises(FileNotFoundError, entry.stat)
self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False)
def test_removed_file(self):
entry = self.create_file_entry()
os.unlink(entry.path)
self.assertFalse(entry.is_dir())
        # On POSIX, the is_dir() result depends on whether scandir() filled d_type or not
if os.name == 'nt':
self.assertTrue(entry.is_file())
self.assertFalse(entry.is_symlink())
if os.name == 'nt':
self.assertRaises(FileNotFoundError, entry.inode)
# don't fail
entry.stat()
entry.stat(follow_symlinks=False)
else:
self.assertGreater(entry.inode(), 0)
self.assertRaises(FileNotFoundError, entry.stat)
self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False)
def test_broken_symlink(self):
if not os_helper.can_symlink():
return self.skipTest('cannot create symbolic link')
filename = self.create_file("file.txt")
os.symlink(filename,
os.path.join(self.path, "symlink.txt"))
entries = self.get_entries(['file.txt', 'symlink.txt'])
entry = entries['symlink.txt']
os.unlink(filename)
self.assertGreater(entry.inode(), 0)
self.assertFalse(entry.is_dir())
self.assertFalse(entry.is_file()) # broken symlink returns False
self.assertFalse(entry.is_dir(follow_symlinks=False))
self.assertFalse(entry.is_file(follow_symlinks=False))
self.assertTrue(entry.is_symlink())
self.assertRaises(FileNotFoundError, entry.stat)
# don't fail
entry.stat(follow_symlinks=False)
def test_bytes(self):
self.create_file("file.txt")
path_bytes = os.fsencode(self.path)
entries = list(os.scandir(path_bytes))
self.assertEqual(len(entries), 1, entries)
entry = entries[0]
self.assertEqual(entry.name, b'file.txt')
self.assertEqual(entry.path,
os.fsencode(os.path.join(self.path, 'file.txt')))
def test_bytes_like(self):
self.create_file("file.txt")
for cls in bytearray, memoryview:
path_bytes = cls(os.fsencode(self.path))
with self.assertWarns(DeprecationWarning):
entries = list(os.scandir(path_bytes))
self.assertEqual(len(entries), 1, entries)
entry = entries[0]
self.assertEqual(entry.name, b'file.txt')
self.assertEqual(entry.path,
os.fsencode(os.path.join(self.path, 'file.txt')))
self.assertIs(type(entry.name), bytes)
self.assertIs(type(entry.path), bytes)
@unittest.skipUnless(os.listdir in os.supports_fd,
'fd support for listdir required for this test.')
def test_fd(self):
self.assertIn(os.scandir, os.supports_fd)
self.create_file('file.txt')
expected_names = ['file.txt']
if os_helper.can_symlink():
os.symlink('file.txt', os.path.join(self.path, 'link'))
expected_names.append('link')
fd = os.open(self.path, os.O_RDONLY)
try:
with os.scandir(fd) as it:
entries = list(it)
names = [entry.name for entry in entries]
self.assertEqual(sorted(names), expected_names)
self.assertEqual(names, os.listdir(fd))
for entry in entries:
self.assertEqual(entry.path, entry.name)
self.assertEqual(os.fspath(entry), entry.name)
self.assertEqual(entry.is_symlink(), entry.name == 'link')
if os.stat in os.supports_dir_fd:
st = os.stat(entry.name, dir_fd=fd)
self.assertEqual(entry.stat(), st)
st = os.stat(entry.name, dir_fd=fd, follow_symlinks=False)
self.assertEqual(entry.stat(follow_symlinks=False), st)
finally:
os.close(fd)
def test_empty_path(self):
self.assertRaises(FileNotFoundError, os.scandir, '')
def test_consume_iterator_twice(self):
self.create_file("file.txt")
iterator = os.scandir(self.path)
entries = list(iterator)
self.assertEqual(len(entries), 1, entries)
        # check that consuming the iterator twice doesn't raise an exception
entries2 = list(iterator)
self.assertEqual(len(entries2), 0, entries2)
def test_bad_path_type(self):
for obj in [1.234, {}, []]:
self.assertRaises(TypeError, os.scandir, obj)
def test_close(self):
self.create_file("file.txt")
self.create_file("file2.txt")
iterator = os.scandir(self.path)
next(iterator)
iterator.close()
# multiple closes
iterator.close()
with self.check_no_resource_warning():
del iterator
def test_context_manager(self):
self.create_file("file.txt")
self.create_file("file2.txt")
with os.scandir(self.path) as iterator:
next(iterator)
with self.check_no_resource_warning():
del iterator
def test_context_manager_close(self):
self.create_file("file.txt")
self.create_file("file2.txt")
with os.scandir(self.path) as iterator:
next(iterator)
iterator.close()
def test_context_manager_exception(self):
self.create_file("file.txt")
self.create_file("file2.txt")
with self.assertRaises(ZeroDivisionError):
with os.scandir(self.path) as iterator:
next(iterator)
1/0
with self.check_no_resource_warning():
del iterator
def test_resource_warning(self):
self.create_file("file.txt")
self.create_file("file2.txt")
iterator = os.scandir(self.path)
next(iterator)
with self.assertWarns(ResourceWarning):
del iterator
support.gc_collect()
# exhausted iterator
iterator = os.scandir(self.path)
list(iterator)
with self.check_no_resource_warning():
del iterator
class TestPEP519(unittest.TestCase):
# Abstracted so it can be overridden to test pure Python implementation
# if a C version is provided.
fspath = staticmethod(os.fspath)
def test_return_bytes(self):
for b in b'hello', b'goodbye', b'some/path/and/file':
self.assertEqual(b, self.fspath(b))
def test_return_string(self):
for s in 'hello', 'goodbye', 'some/path/and/file':
self.assertEqual(s, self.fspath(s))
def test_fsencode_fsdecode(self):
for p in "path/like/object", b"path/like/object":
pathlike = FakePath(p)
self.assertEqual(p, self.fspath(pathlike))
self.assertEqual(b"path/like/object", os.fsencode(pathlike))
self.assertEqual("path/like/object", os.fsdecode(pathlike))
def test_pathlike(self):
self.assertEqual('#feelthegil', self.fspath(FakePath('#feelthegil')))
self.assertTrue(issubclass(FakePath, os.PathLike))
self.assertTrue(isinstance(FakePath('x'), os.PathLike))
def test_garbage_in_exception_out(self):
vapor = type('blah', (), {})
for o in int, type, os, vapor():
self.assertRaises(TypeError, self.fspath, o)
def test_argument_required(self):
self.assertRaises(TypeError, self.fspath)
def test_bad_pathlike(self):
# __fspath__ returns a value other than str or bytes.
self.assertRaises(TypeError, self.fspath, FakePath(42))
# __fspath__ attribute that is not callable.
c = type('foo', (), {})
c.__fspath__ = 1
self.assertRaises(TypeError, self.fspath, c())
# __fspath__ raises an exception.
self.assertRaises(ZeroDivisionError, self.fspath,
FakePath(ZeroDivisionError()))
def test_pathlike_subclasshook(self):
        # bpo-38878: __subclasshook__ used to make subclass checks
        # succeed against subclasses of the abstract implementation.
class A(os.PathLike):
pass
self.assertFalse(issubclass(FakePath, A))
self.assertTrue(issubclass(FakePath, os.PathLike))
def test_pathlike_class_getitem(self):
self.assertIsInstance(os.PathLike[bytes], types.GenericAlias)
class TimesTests(unittest.TestCase):
def test_times(self):
times = os.times()
self.assertIsInstance(times, os.times_result)
for field in ('user', 'system', 'children_user', 'children_system',
'elapsed'):
value = getattr(times, field)
self.assertIsInstance(value, float)
if os.name == 'nt':
self.assertEqual(times.children_user, 0)
self.assertEqual(times.children_system, 0)
self.assertEqual(times.elapsed, 0)
# Only test if the C version is provided, otherwise TestPEP519 already tested
# the pure Python implementation.
if hasattr(os, "_fspath"):
class TestPEP519PurePython(TestPEP519):
"""Explicitly test the pure Python implementation of os.fspath()."""
fspath = staticmethod(os._fspath)
if __name__ == "__main__":
unittest.main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_superadmin_group(apps, schema_editor):
"""
Migrates the groups to create an admin group with all permissions
granted. Replaces the delegate group to get pk=2.
- Create new delegate group. Move users and permissions to it.
- Rename the old delegate group to Admin and remove all permissions.
- If a group with the name 'Admin' (probably with pk = 4) exists, move all
users from it to the new superadmin group and delete it. If not, check for
the staff group and assign all users to the superadmin group.
In 0011_postgres_auth_group_id_sequence, the id sequence for this migration is
restarted when using postgresql.
"""
Group = apps.get_model("users", "Group")
    # If no groups exist at all, skip this migration
if Group.objects.count() == 0:
return
    # Get the new superadmin group (or the old Delegates group).
    # We cannot use Group.objects.get_or_create here, because it would trigger an autoupdate.
try:
superadmin = Group.objects.get(pk=2)
created_superadmin_group = False
except Group.DoesNotExist:
superadmin = Group(pk=2, name="__temp__")
superadmin.save(skip_autoupdate=True)
created_superadmin_group = True
if not created_superadmin_group:
new_delegate = Group(name="Delegates2")
new_delegate.save(skip_autoupdate=True)
new_delegate.permissions.set(superadmin.permissions.all())
superadmin.permissions.set([])
for user in superadmin.user_set.all():
user.groups.add(new_delegate)
user.groups.remove(superadmin)
finished_moving_users = False
try:
admin = Group.objects.get(name="Admin")
for user in admin.user_set.all():
user.groups.add(superadmin)
user.groups.remove(admin)
admin.delete(skip_autoupdate=True)
finished_moving_users = True
except Group.DoesNotExist:
pass
if not finished_moving_users:
try:
staff = Group.objects.get(name="Staff")
for user in staff.user_set.all():
user.groups.add(superadmin)
except Group.DoesNotExist:
pass
superadmin.name = "Admin"
superadmin.save(skip_autoupdate=True)
if not created_superadmin_group:
new_delegate.name = "Delegates"
new_delegate.save(skip_autoupdate=True)
class Migration(migrations.Migration):
dependencies = [("users", "0006_user_email")]
operations = [migrations.RunPython(create_superadmin_group)]
|
"""Handle the loading and initialization of game sessions."""
from __future__ import annotations
import copy
import lzma
import pickle
import traceback
from typing import Optional
import tcod
import color
from engine import Engine
import entity_factories
from game_map import GameWorld
import input_handlers
from procgen import generate_dungeon
# Load the background image and remove the alpha channel.
background_image = tcod.image.load("menu_background.png")[:, :, :3]
def new_game() -> Engine:
"""Return a brand new game session as an Engine instance."""
map_width = 80
map_height = 43
room_max_size = 12
room_min_size = 6
max_rooms = 25
player = copy.deepcopy(entity_factories.player)
engine = Engine(player=player)
engine.game_world = GameWorld(
engine=engine,
max_rooms=max_rooms,
room_min_size=room_min_size,
room_max_size=room_max_size,
map_width=map_width,
map_height=map_height,
)
engine.game_world.generate_floor()
engine.update_fov()
engine.message_log.add_message(
"Hello and welcome, adventurer, to the tower!", color.welcome_text
)
stick = copy.deepcopy(entity_factories.stick)
padded_clothes = copy.deepcopy(entity_factories.padded_clothes)
stick.parent = player.inventory
padded_clothes.parent = player.inventory
player.inventory.items.append(stick)
player.equipment.toggle_equip(stick, add_message=False)
player.inventory.items.append(padded_clothes)
player.equipment.toggle_equip(padded_clothes, add_message=False)
return engine
def load_game(filename: str) -> Engine:
"""Load an Engine instance from a file."""
with open(filename, "rb") as f:
engine = pickle.loads(lzma.decompress(f.read()))
assert isinstance(engine, Engine)
return engine
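# A minimal counterpart sketch of the save path (an assumption: the real save
# routine lives elsewhere in this project; this only mirrors the lzma + pickle
# format that load_game expects).
def save_game(engine: Engine, filename: str) -> None:
    """Serialize an Engine instance to a compressed save file."""
    save_data = lzma.compress(pickle.dumps(engine))
    with open(filename, "wb") as f:
        f.write(save_data)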
class MainMenu(input_handlers.BaseEventHandler):
"""Handle the main menu rendering and input."""
def on_render(self, console: tcod.Console) -> None:
"""Render the main menu on a background image."""
console.draw_semigraphics(background_image, 0, 0)
console.print(
console.width // 2,
console.height // 2 - 4,
"Dungeon Descent",
fg=color.menu_title,
alignment=tcod.CENTER,
)
console.print(
console.width // 2,
console.height - 2,
"By Jacky Lau",
fg=color.menu_title,
alignment=tcod.CENTER,
)
menu_width = 24
for i, text in enumerate(
["[Q] Quit", "[A] Play a new game", "[Z] Continue last game"]
):
console.print(
console.width // 2,
console.height // 2 - 2 + i,
text.ljust(menu_width),
fg=color.menu_text,
bg=color.black,
alignment=tcod.CENTER,
bg_blend=tcod.BKGND_ALPHA(64),
)
def ev_keydown(
self, event: tcod.event.KeyDown
) -> Optional[input_handlers.BaseEventHandler]:
if event.sym in (tcod.event.K_q, tcod.event.K_ESCAPE):
raise SystemExit()
elif event.sym == tcod.event.K_z:
try:
return input_handlers.MainGameEventHandler(load_game("savegame.sav"))
except FileNotFoundError:
return input_handlers.PopupMessage(self, "No saved game to load.")
except Exception as exc:
traceback.print_exc() # Print to stderr.
return input_handlers.PopupMessage(self, f"Failed to load save:\n{exc}")
elif event.sym == tcod.event.K_a:
return input_handlers.MainGameEventHandler(new_game())
return None
|
# coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import xml.etree.ElementTree as ET
from pathlib import Path
import datasets
_CITATION = """\
@Article{Sharjeel2016,
author="Sharjeel, Muhammad
and Nawab, Rao Muhammad Adeel
and Rayson, Paul",
title="COUNTER: corpus of Urdu news text reuse",
journal="Language Resources and Evaluation",
year="2016",
pages="1--27",
issn="1574-0218",
doi="10.1007/s10579-016-9367-2",
url="http://dx.doi.org/10.1007/s10579-016-9367-2"
}
"""
_DESCRIPTION = """\
The COrpus of Urdu News TExt Reuse (COUNTER) corpus contains 1200 documents with real examples of text reuse from the field of journalism. It has been manually annotated at document level with three levels of reuse: wholly derived, partially derived and non derived.
"""
_HOMEPAGE = "http://ucrel.lancs.ac.uk/textreuse/counter.php"
_LICENSE = (
"The corpus is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. "
)
_DOWNLOAD_URL = "http://ucrel.lancs.ac.uk/textreuse/COUNTER.zip"
_NUM_EXAMPLES = 600
_CLASS_NAME_MAP = {"WD": "wholly_derived", "PD": "partially_derived", "ND": "not_derived"}
class Counter(datasets.GeneratorBasedBuilder):
"""Corpus of Urdu News Text Reuse"""
VERSION = datasets.Version("1.0.0")
def _info(self):
features = datasets.Features(
{
"source": {
"filename": datasets.Value("string"),
"headline": datasets.Value("string"),
"body": datasets.Value("string"),
"total_number_of_words": datasets.Value("int64"),
"total_number_of_sentences": datasets.Value("int64"),
"number_of_words_with_swr": datasets.Value("int64"),
"newspaper": datasets.Value("string"),
"newsdate": datasets.Value("string"),
"domain": datasets.ClassLabel(
names=[
"business",
"sports",
"national",
"foreign",
"showbiz",
]
),
"classification": datasets.ClassLabel(
names=["wholly_derived", "partially_derived", "not_derived"]
),
},
"derived": {
"filename": datasets.Value("string"),
"headline": datasets.Value("string"),
"body": datasets.Value("string"),
"total_number_of_words": datasets.Value("int64"),
"total_number_of_sentences": datasets.Value("int64"),
"number_of_words_with_swr": datasets.Value("int64"),
"newspaper": datasets.Value("string"),
"newsdate": datasets.Value("string"),
"domain": datasets.ClassLabel(
names=[
"business",
"sports",
"national",
"foreign",
"showbiz",
]
),
"classification": datasets.ClassLabel(
names=["wholly_derived", "partially_derived", "not_derived"]
),
},
}
)
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
supervised_keys=None,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
data_dir = dl_manager.download_and_extract(_DOWNLOAD_URL)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={"data_dir": data_dir},
)
]
def _generate_examples(self, data_dir):
""" Yields examples. """
def parse_file(file):
tree = ET.parse(file)
root = tree.getroot()
attributes = root.attrib
headline = root.find("headline").text
body = root.find("body").text
parsed = {
"filename": attributes["filename"],
"headline": headline,
"body": body,
"total_number_of_words": int(attributes["totalnoofwords"]),
"total_number_of_sentences": int(attributes["totalnoofsentences"]),
"number_of_words_with_swr": int(attributes["noofwordswithSWR"]),
"newspaper": attributes["newspaper"],
"newsdate": attributes["newsdate"],
"domain": attributes["domain"],
"classification": _CLASS_NAME_MAP[attributes["classification"]],
}
return parsed
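        # Illustrative shape of a COUNTER source document, inferred from the
        # tags and attributes parsed above (not copied from the corpus itself):
        #
        #   <document filename="0001.xml" totalnoofwords="..." totalnoofsentences="..."
        #             noofwordswithSWR="..." newspaper="..." newsdate="..."
        #             domain="sports" classification="WD">
        #     <headline>...</headline>
        #     <body>...</body>
        #   </document>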
base_path = Path(data_dir)
base_path = base_path / "COUNTER"
files = sorted(base_path.glob(r"[0-9][0-9][0-9][0-9].xml"))
for _id, file in enumerate(files):
example = {}
with file.open(encoding="utf-8") as f:
source = parse_file(f)
example["source"] = source
derived_file = base_path / (file.stem + "p" + file.suffix)
with derived_file.open(encoding="utf-8") as f:
derived = parse_file(f)
example["derived"] = derived
yield _id, example
|
from __future__ import division, print_function
import numpy as np
from openaerostruct.geometry.utils import generate_mesh
from openaerostruct.integration.aerostruct_groups import AerostructGeometry, AerostructPoint
import openmdao.api as om
from openaerostruct.utils.constants import grav_constant
# Create a dictionary to store options about the surface
mesh_dict = {'num_y' : 5,
'num_x' : 2,
'wing_type' : 'CRM',
'symmetry' : True,
'num_twist_cp' : 5}
mesh, twist_cp = generate_mesh(mesh_dict)
surface = {
# Wing definition
'name' : 'wing', # name of the surface
'symmetry' : True, # if true, model one half of wing
# reflected across the plane y = 0
'S_ref_type' : 'wetted', # how we compute the wing area,
# can be 'wetted' or 'projected'
'fem_model_type' : 'tube',
'thickness_cp' : np.array([.1, .2, .3]),
'twist_cp' : twist_cp,
'mesh' : mesh,
# Aerodynamic performance of the lifting surface at
# an angle of attack of 0 (alpha=0).
# These CL0 and CD0 values are added to the CL and CD
# obtained from aerodynamic analysis of the surface to get
# the total CL and CD.
# These CL0 and CD0 values do not vary wrt alpha.
'CL0' : 0.0, # CL of the surface at alpha=0
'CD0' : 0.015, # CD of the surface at alpha=0
# Airfoil properties for viscous drag calculation
'k_lam' : 0.05, # percentage of chord with laminar
# flow, used for viscous drag
't_over_c_cp' : np.array([0.15]), # thickness over chord ratio (NACA0015)
            'c_max_t' : .303,            # chordwise location of maximum thickness (NACA0015)
'with_viscous' : True,
'with_wave' : False, # if true, compute wave drag
# Structural values are based on aluminum 7075
'E' : 70.e9, # [Pa] Young's modulus of the spar
'G' : 30.e9, # [Pa] shear modulus of the spar
'yield' : 500.e6 / 2.5, # [Pa] yield stress divided by 2.5 for limiting case
'mrho' : 3.e3, # [kg/m^3] material density
'fem_origin' : 0.35, # normalized chordwise location of the spar
'wing_weight_ratio' : 2.,
'struct_weight_relief' : False, # True to add the weight of the structure to the loads on the structure
'distributed_fuel_weight' : False,
# Constraints
'exact_failure_constraint' : False, # if false, use KS function
}
# Create the problem and assign the model group
prob = om.Problem()
# Add problem information as an independent variables component
indep_var_comp = om.IndepVarComp()
indep_var_comp.add_output('v', val=248.136, units='m/s')
indep_var_comp.add_output('alpha', val=5., units='deg')
indep_var_comp.add_output('Mach_number', val=0.84)
indep_var_comp.add_output('re', val=1.e6, units='1/m')
indep_var_comp.add_output('rho', val=0.38, units='kg/m**3')
indep_var_comp.add_output('CT', val=grav_constant * 17.e-6, units='1/s')
indep_var_comp.add_output('R', val=11.165e6, units='m')
indep_var_comp.add_output('W0', val=0.4 * 3e5, units='kg')
indep_var_comp.add_output('speed_of_sound', val=295.4, units='m/s')
indep_var_comp.add_output('load_factor', val=1.)
indep_var_comp.add_output('empty_cg', val=np.zeros((3)), units='m')
prob.model.add_subsystem('prob_vars',
indep_var_comp,
promotes=['*'])
aerostruct_group = AerostructGeometry(surface=surface)
name = 'wing'
# Add tmp_group to the problem with the name of the surface.
prob.model.add_subsystem(name, aerostruct_group)
point_name = 'AS_point_0'
# Create the aero point group and add it to the model
AS_point = AerostructPoint(surfaces=[surface])
prob.model.add_subsystem(point_name, AS_point,
promotes_inputs=['v', 'alpha', 'Mach_number', 're', 'rho', 'CT', 'R',
'W0', 'speed_of_sound', 'empty_cg', 'load_factor'])
com_name = point_name + '.' + name + '_perf'
prob.model.connect(name + '.local_stiff_transformed', point_name + '.coupled.' + name + '.local_stiff_transformed')
prob.model.connect(name + '.nodes', point_name + '.coupled.' + name + '.nodes')
# Connect aerodynamic mesh to coupled group mesh
prob.model.connect(name + '.mesh', point_name + '.coupled.' + name + '.mesh')
# Connect performance calculation variables
prob.model.connect(name + '.radius', com_name + '.radius')
prob.model.connect(name + '.thickness', com_name + '.thickness')
prob.model.connect(name + '.nodes', com_name + '.nodes')
prob.model.connect(name + '.cg_location', point_name + '.' + 'total_perf.' + name + '_cg_location')
prob.model.connect(name + '.structural_mass', point_name + '.' + 'total_perf.' + name + '_structural_mass')
prob.model.connect(name + '.t_over_c', com_name + '.t_over_c')
prob.driver = om.ScipyOptimizeDriver()
prob.driver.options['tol'] = 1e-9
recorder = om.SqliteRecorder("aerostruct.db")
prob.driver.add_recorder(recorder)
prob.driver.recording_options['record_derivatives'] = True
prob.driver.recording_options['includes'] = ['*']
# Set up the problem and add design variables, constraints, and objective
prob.model.add_design_var('wing.twist_cp', lower=-10., upper=15.)
prob.model.add_design_var('wing.thickness_cp', lower=0.01, upper=0.5, scaler=1e2)
prob.model.add_constraint('AS_point_0.wing_perf.failure', upper=0.)
prob.model.add_constraint('AS_point_0.wing_perf.thickness_intersects', upper=0.)
# Add design variables, constraint, and objective to the problem
prob.model.add_design_var('alpha', lower=-10., upper=10.)
prob.model.add_constraint('AS_point_0.L_equals_W', equals=0.)
prob.model.add_objective('AS_point_0.fuelburn', scaler=1e-5)
# Set up the problem
prob.setup(check=True)
# Only run analysis
# prob.run_model()
# Run optimization
prob.run_driver()
print()
print('CL:', prob['AS_point_0.wing_perf.CL'])
print('CD:', prob['AS_point_0.wing_perf.CD'])
|
# -*- coding: utf-8 -*-
from itertools import chain
from atores import ATIVO
VITORIA = 'VITORIA'
DERROTA = 'DERROTA'
EM_ANDAMENTO = 'EM_ANDAMENTO'
class Ponto():
def __init__(self, x, y, caracter):
self.caracter = caracter
self.x = round(x)
self.y = round(y)
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.caracter == other.caracter
def __hash__(self):
return hash(self.x) ^ hash(self.y)
def __repr__(self, *args, **kwargs):
return "Ponto(%s,%s,'%s')" % (self.x, self.y, self.caracter)
class Fase():
def __init__(self, intervalo_de_colisao=1):
"""
Método que inicializa uma fase.
:param intervalo_de_colisao:
"""
self.intervalo_de_colisao = intervalo_de_colisao
self._passaros = []
self._porcos = []
self._obstaculos = []
def adicionar_obstaculo(self, *obstaculos):
"""
Adiciona obstáculos em uma fase
:param obstaculos:
"""
self._obstaculos.extend(obstaculos)
def adicionar_porco(self, *porcos):
"""
Adiciona porcos em uma fase
:param porcos:
"""
self._porcos.extend(porcos)
def adicionar_passaro(self, *passaros):
"""
Adiciona pássaros em uma fase
:param passaros:
"""
self._passaros.extend(passaros)
def status(self):
"""
Método que indica com mensagem o status do jogo
Se o jogo está em andamento (ainda tem porco ativo e pássaro ativo), retorna essa mensagem.
Se o jogo acabou com derrota (ainda existe porco ativo), retorna essa mensagem
Se o jogo acabou com vitória (não existe porco ativo), retorna essa mensagem
:return:
"""
if not self._possui_porco_ativo():
return VITORIA
elif self._possui_passaros_ativos():
return EM_ANDAMENTO
else:
return DERROTA
def lancar(self, angulo, tempo):
"""
Método que executa lógica de lançamento.
Deve escolher o primeiro pássaro não lançado da lista e chamar seu método lançar
Se não houver esse tipo de pássaro, não deve fazer nada
:param angulo: ângulo de lançamento
:param tempo: Tempo de lançamento
"""
for passaro in self._passaros:
if not passaro.foi_lancado():
passaro.lancar(angulo, tempo)
break
def calcular_pontos(self, tempo):
"""
Lógica que retorna os pontos a serem exibidos na tela.
Cada ator deve ser transformado em um Ponto.
:param tempo: tempo para o qual devem ser calculados os pontos
:return: objeto do tipo Ponto
"""
for passaro in self._passaros:
passaro.calcular_posicao(tempo)
for alvo in self._obstaculos + self._porcos:
passaro.colidir(alvo, self.intervalo_de_colisao)
passaro.colidir_com_chao()
        pontos = [self._transformar_em_ponto(a) for a in self._passaros + self._obstaculos + self._porcos]
return pontos
def _transformar_em_ponto(self, ator):
return Ponto(ator.x, ator.y, ator.caracter())
def _possui_porco_ativo(self):
for porco in self._porcos:
if porco.status == ATIVO:
return True
return False
def _possui_passaros_ativos(self):
for passaro in self._passaros:
if passaro.status == ATIVO:
return True
return False
|
from django.conf.urls import url
from .controllers import generate, rearrange, home
urlpatterns = [
url(r'^generate$', generate),
url(r'^rearrange', rearrange),
url(r'^$', home),
]
|
#Copyright [2020] [Indian Institute of Science, Bangalore & Tata Institute of Fundamental Research, Mumbai]
#SPDX-License-Identifier: Apache-2.0
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from scipy.optimize import least_squares
def find_slope(data):
    # Least-squares slope of the line y = m*i (through the origin) fitted to
    # data indexed i = 1..n:  m = sum(i * y_i) / sum(i**2)
    n = len(data)
    return np.dot(data, np.arange(1, n + 1)) / np.sum(np.square(np.arange(1, n + 1)))
def find_slope_from_regression(data):
    # Least-squares fit of y = intercept + slope*i for i = 0..n-1; returns the slope.
    param0 = [1, data[0]]  # initial guess: [slope, intercept]
    n = len(data)
    def obj_fn(param):
        return (param[1] + param[0] * np.arange(0, n)) - data
    return least_squares(obj_fn, param0).x[0]
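# Quick sanity check (illustrative only): both estimators recover the slope of
# an exact line. find_slope fits y = m*i through the origin for i = 1..n, so
# find_slope(np.array([3., 6., 9.])) == 3.0, and
# find_slope_from_regression(np.array([2., 5., 8., 11.])) returns ~3.0 (intercept 2).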
def calibrate(resolution,count):
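    """Calibrate simulator betas against the observed deceased curve.

    Returns a list [flag, beta_scale_factor, step_beta_h, step_beta_w,
    step_beta_c, delay]; flag is True once the slopes and lambdas match
    within tolerance (summary inferred from the return statements below).
    """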
    threshold = 10  # lower threshold on dead_data
    error_tolerance = 1  # tolerance on shift
    slope_tolerance = 0.01  # tolerance on slope
    lower_threshold = 10  # lower threshold for simulated dead_mean
    upper_threshold = 200  # upper threshold for simulated dead_mean
# set the target lambdas
lambda_h_target = 0.333333
lambda_w_target = 0.333333
lambda_c_target = 0.333334
# read data from ecdp file
country='India'
infected = pd.read_csv('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/ecdp.csv')
    infected = infected.fillna('Nodata')  # fillna returns a new frame; assign it back
infected = infected.iloc[::-1]
# read dead population
i = infected.loc[infected['countriesAndTerritories']==country]
dates1 = np.array(infected.loc[infected['countriesAndTerritories']==country]['dateRep'].values)
# make cumulative death count
dead_data = []
dead_data.append(i['deaths'].values[0])
for j in range(1,len(dates1)):
dead_data.append(dead_data[j-1] + i['deaths'].values[j])
dead_data = np.array(dead_data)
# read simulation data and consider data based on threshold
dead_simulation = pd.read_csv('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/dead_mean.csv')['dead'].values
# keep track of the shift in data
shift_in_data = np.min(np.where(dead_data>=threshold)[0]) - 61 # to make it start from March 1st
    # plot dead_data and dead_simulation; assumes that dead_data starts on March 1st
plt.plot(dead_data[61:len(dead_data)],label='India Data')
plt.plot(np.take(dead_simulation,np.arange(0,len(dead_simulation),resolution)),'ro-', label='Simulation')
    plt.grid(True)
    plt.xlabel('Days (starting March 1st)')
    plt.ylabel('Deceased Population')
    plt.legend()
    plt.savefig('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/combined_plot_linear_scale')
plt.close()
plt.plot(np.log10(dead_data[61:len(dead_data)]),label='India Data')
plt.plot(np.log10(np.take(dead_simulation,np.arange(0,len(dead_simulation),resolution))),'ro-', label='Simulation')
    plt.grid(True)
    plt.xlabel('Days (starting March 1st)')
    plt.ylabel('log_10 Deceased Population')
    plt.legend()
    plt.savefig('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/combined_plot_log_scale')
plt.close()
# consider data of interest based on threshold
    dead_data = dead_data[dead_data >= threshold][0:16]  # Use [0:10] for NY and Wuhan; [0:16] for India to consider death data from 10-200
indices_of_interest = np.where(np.logical_and(dead_simulation>=lower_threshold, dead_simulation<=upper_threshold))
dead_simulation = dead_simulation[indices_of_interest]
# downsample simulation data
dead_simulation = np.take(dead_simulation, np.arange(0,len(dead_simulation),resolution))
# read lambda values from the simulation
lambda_h = pd.read_csv('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/lambda H_mean.csv')['lambda H'].values[-1]
lambda_w = pd.read_csv('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/lambda W_mean.csv')['lambda W'].values[-1]
lambda_c = pd.read_csv('/home/nihesh/Documents/covid_19_bangalore/markov_simuls/simulator/python_scripts/python_scripts_CPP/data/lambda C_mean.csv')['lambda C'].values[-1]
lambda_h_diff = (lambda_h-lambda_h_target)
lambda_w_diff = (lambda_w-lambda_w_target)
lambda_c_diff = (lambda_c-lambda_c_target)
slope_dead_simulator = find_slope_from_regression(np.log(dead_simulation))
slope_dead_data = find_slope_from_regression(np.log(dead_data))
slope_diff = slope_dead_data - slope_dead_simulator
flag = False
print("slope_dead_simulator = ", slope_dead_simulator, ". slope_dead_data = ", slope_dead_data, ". slope_diff",slope_diff)
print("lambda_h_diff = ",lambda_h_diff,". lambda_w_diff = ",lambda_w_diff,". lambda_c_diff = ",lambda_c_diff)
# if slopes match, report delay
count_less_than_30 = 5
    if abs(lambda_h_diff) < 0.01 and abs(lambda_w_diff) < 0.01 and abs(lambda_c_diff) < 0.01 and abs(slope_diff) < slope_tolerance:
flag = True
return [flag, 1, 0, 0, 0, shift_in_data - 1 - indices_of_interest[0][0]/resolution]
# if not, calibrate for slope
else:
step_beta_h = -1*lambda_h_diff/(3+count)
step_beta_w = -1*lambda_w_diff/(3+count)
step_beta_c = -1*lambda_c_diff/(3+count)
beta_scale_factor = max(min(np.exp(slope_diff),1.5), 0.66)
if (count>=30):
beta_scale_factor = max(min(np.exp(slope_diff/(count-25)),1.5), 0.66)
elif (abs(lambda_h_diff)<0.02 and abs(lambda_w_diff)<0.02 and abs(lambda_c_diff)<0.02):
beta_scale_factor = max(min(np.exp(slope_diff/(count_less_than_30)),1.5), 0.66)
count_less_than_30 += 1
return [flag, beta_scale_factor, step_beta_h, step_beta_w, step_beta_c,shift_in_data - 1- indices_of_interest[0][0]/resolution]
|
import json
import pytest
from monty.io import zopen
from emmet.core.vasp.calc_types import RunType, TaskType, run_type, task_type
from emmet.core.vasp.task import TaskDocument
from emmet.core.vasp.validation import ValidationDoc
def test_task_type():
# TODO: Switch this to actual inputs?
input_types = [
("NSCF Line", {"incar": {"ICHARG": 11}, "kpoints": {"labels": ["A"]}}),
("NSCF Uniform", {"incar": {"ICHARG": 11}}),
("Dielectric", {"incar": {"LEPSILON": True}}),
("DFPT Dielectric", {"incar": {"LEPSILON": True, "IBRION": 7}}),
("DFPT Dielectric", {"incar": {"LEPSILON": True, "IBRION": 8}}),
("DFPT", {"incar": {"IBRION": 7}}),
("DFPT", {"incar": {"IBRION": 8}}),
("Static", {"incar": {"NSW": 0}}),
]
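    # each sample input above should classify to the matching TaskType member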
for _type, inputs in input_types:
assert task_type(inputs) == TaskType(_type)
def test_run_type():
params_sets = [
("GGA", {"GGA": "--"}),
("GGA+U", {"GGA": "--", "LDAU": True}),
("SCAN", {"METAGGA": "Scan"}),
("SCAN+U", {"METAGGA": "Scan", "LDAU": True}),
("R2SCAN", {"METAGGA": "R2SCAN"}),
("R2SCAN+U", {"METAGGA": "R2SCAN", "LDAU": True}),
]
for _type, params in params_sets:
assert run_type(params) == RunType(_type)
@pytest.fixture(scope="session")
def tasks(test_dir):
with zopen(test_dir / "test_si_tasks.json.gz") as f:
data = json.load(f)
return [TaskDocument(**d) for d in data]
def test_validator(tasks):
validation_docs = [ValidationDoc.from_task_doc(task) for task in tasks]
assert len(validation_docs) == len(tasks)
assert all(doc.valid for doc in validation_docs)
def test_computed_entry(tasks):
entries = [task.entry for task in tasks]
ids = {e.entry_id for e in entries}
assert ids == {"mp-1141021", "mp-149", "mp-1686587", "mp-1440634"}
@pytest.fixture(scope="session")
def task_ldau(test_dir):
with zopen(test_dir / "test_task.json") as f:
data = json.load(f)
return TaskDocument(**data)
def test_ldau(task_ldau):
assert task_ldau.run_type == RunType.GGA_U
assert ValidationDoc.from_task_doc(task_ldau).valid is False
def test_ldau_validation(test_dir):
with open(test_dir / "old_aflow_ggau_task.json") as f:
data = json.load(f)
task = TaskDocument(**data)
assert task.run_type == "GGA+U"
valid = ValidationDoc.from_task_doc(task)
assert valid.valid
|
import datetime
import json
import locale
import os
import shutil
from types import ModuleType
from typing import Union
import numpy as np
import pandas as pd
import pytest
from freezegun import freeze_time
import great_expectations as ge
from great_expectations.core import (
ExpectationConfiguration,
ExpectationSuite,
ExpectationValidationResult,
expectationSuiteSchema,
)
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
)
from great_expectations.data_context.util import file_relative_path
from great_expectations.dataset.pandas_dataset import PandasDataset
from great_expectations.datasource import SqlAlchemyDatasource
from great_expectations.util import import_library_module
from .test_utils import expectationSuiteValidationResultSchema, get_dataset
###
#
# NOTE: THESE TESTS ARE WRITTEN WITH THE en_US.UTF-8 LOCALE AS DEFAULT FOR STRING FORMATTING
#
###
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
def pytest_configure(config):
config.addinivalue_line(
"markers",
"smoketest: mark test as smoketest--it does not have useful assertions but may produce side effects "
"that require manual inspection.",
)
config.addinivalue_line(
"markers",
"rendered_output: produces rendered output that should be manually reviewed.",
)
config.addinivalue_line(
"markers",
"aws_integration: runs aws integration test that may be very slow and requires credentials",
)
def pytest_addoption(parser):
parser.addoption(
"--no-spark",
action="store_true",
help="If set, suppress all tests against the spark test suite",
)
parser.addoption(
"--no-sqlalchemy",
action="store_true",
help="If set, suppress all tests using sqlalchemy",
)
parser.addoption(
"--no-postgresql",
action="store_true",
help="If set, suppress all tests against postgresql",
)
parser.addoption(
"--mysql", action="store_true", help="If set, execute tests against mysql",
)
parser.addoption(
"--mssql", action="store_true", help="If set, execute tests against mssql",
)
parser.addoption(
"--aws-integration",
action="store_true",
help="If set, run aws integration tests",
)
def build_test_backends_list(metafunc):
test_backends = ["PandasDataset"]
no_spark = metafunc.config.getoption("--no-spark")
if not no_spark:
try:
from pyspark.sql import SparkSession
except ImportError:
raise ValueError("spark tests are requested, but pyspark is not installed")
test_backends += ["SparkDFDataset"]
no_sqlalchemy = metafunc.config.getoption("--no-sqlalchemy")
if not no_sqlalchemy:
test_backends += ["sqlite"]
sa: Union[ModuleType, None] = import_library_module(module_name="sqlalchemy")
no_postgresql = metafunc.config.getoption("--no-postgresql")
if not (sa is None or no_postgresql):
###
# NOTE: 20190918 - JPC: Since I've had to relearn this a few times, a note here.
        # SQLAlchemy coerces postgres DOUBLE_PRECISION to float, which loses precision
        # on the round trip compared to NUMERIC, which stays a Python Decimal.
        # Make sure that tests (and users!) understand that subtlety,
# which can be important for distributional expectations, for example.
###
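        # Illustration (not executed here): a value stored as DOUBLE_PRECISION
        # round-trips as a Python float (e.g. 0.1 + 0.2 -> 0.30000000000000004),
        # while the same value in a NUMERIC column comes back as Decimal('0.3'),
        # so distributional expectations can behave differently per backend.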
try:
engine = sa.create_engine("postgresql://postgres@localhost/test_ci")
conn = engine.connect()
conn.close()
except (ImportError, sa.exc.SQLAlchemyError):
raise ImportError(
"postgresql tests are requested, but unable to connect to the postgresql database at "
"'postgresql://postgres@localhost/test_ci'"
)
test_backends += ["postgresql"]
mysql = metafunc.config.getoption("--mysql")
if sa and mysql:
try:
engine = sa.create_engine("mysql+pymysql://root@localhost/test_ci")
conn = engine.connect()
conn.close()
except (ImportError, sa.exc.SQLAlchemyError):
raise ImportError(
"mysql tests are requested, but unable to connect to the mysql database at "
"'mysql+pymysql://root@localhost/test_ci'"
)
test_backends += ["mysql"]
mssql = metafunc.config.getoption("--mssql")
if sa and mssql:
try:
engine = sa.create_engine(
"mssql+pyodbc://sa:ReallyStrongPwd1234%^&*@localhost:1433/test_ci?driver=ODBC Driver 17 for SQL Server&charset=utf8&autocommit=true",
# echo=True,
)
conn = engine.connect()
conn.close()
except (ImportError, sa.exc.SQLAlchemyError):
raise ImportError(
"mssql tests are requested, but unable to connect to the mssql database at "
"'mssql+pyodbc://sa:ReallyStrongPwd1234%^&*@localhost:1433/test_ci?driver=ODBC Driver 17 for SQL Server&charset=utf8&autocommit=true'",
)
test_backends += ["mssql"]
return test_backends
def pytest_generate_tests(metafunc):
test_backends = build_test_backends_list(metafunc)
if "test_backend" in metafunc.fixturenames:
metafunc.parametrize("test_backend", test_backends, scope="module")
if "test_backends" in metafunc.fixturenames:
metafunc.parametrize("test_backends", [test_backends], scope="module")
def pytest_collection_modifyitems(config, items):
if config.getoption("--aws-integration"):
# --aws-integration given in cli: do not skip aws-integration tests
return
skip_aws_integration = pytest.mark.skip(
reason="need --aws-integration option to run"
)
for item in items:
if "aws_integration" in item.keywords:
item.add_marker(skip_aws_integration)
@pytest.fixture(autouse=True)
def no_usage_stats(monkeypatch):
# Do not generate usage stats from test runs
monkeypatch.setenv("GE_USAGE_STATS", "False")
@pytest.fixture
def sa(test_backends):
if (
"postgresql" not in test_backends
and "sqlite" not in test_backends
and "mysql" not in test_backends
and "mssql" not in test_backends
):
pytest.skip("No recognized sqlalchemy backend selected.")
else:
import sqlalchemy as sa
return sa
@pytest.fixture
def spark_session(test_backends):
if "SparkDFDataset" not in test_backends:
pytest.skip("No spark backend selected.")
from pyspark.sql import SparkSession
return SparkSession.builder.getOrCreate()
@pytest.fixture
def empty_expectation_suite():
expectation_suite = {
"expectation_suite_name": "default",
"meta": {},
"expectations": [],
}
return expectation_suite
@pytest.fixture
def basic_expectation_suite():
expectation_suite = ExpectationSuite(
expectation_suite_name="default",
meta={},
expectations=[
ExpectationConfiguration(
expectation_type="expect_column_to_exist",
kwargs={"column": "infinities"},
),
ExpectationConfiguration(
expectation_type="expect_column_to_exist", kwargs={"column": "nulls"}
),
ExpectationConfiguration(
expectation_type="expect_column_to_exist", kwargs={"column": "naturals"}
),
ExpectationConfiguration(
expectation_type="expect_column_values_to_be_unique",
kwargs={"column": "naturals"},
),
],
)
return expectation_suite
@pytest.fixture
def file_data_asset(tmp_path):
tmp_path = str(tmp_path)
path = os.path.join(tmp_path, "file_data_asset.txt")
with open(path, "w+") as file:
file.write(json.dumps([0, 1, 2, 3, 4]))
return ge.data_asset.FileDataAsset(file_path=path)
@pytest.fixture
def numeric_high_card_dict():
data = {
"norm_0_1": [
0.7225866251125405,
-0.5951819764073379,
-0.2679313226299394,
-0.22503289285616823,
0.1432092195399402,
1.1874676802669433,
1.2766412196640815,
0.15197071140718296,
-0.08787273509474242,
-0.14524643717509128,
-1.236408169492396,
-0.1595432263317598,
1.0856768114741797,
0.5082788229519655,
0.26419244684748955,
-0.2532308428977167,
-0.6362679196021943,
-3.134120304969242,
-1.8990888524318292,
0.15701781863102648,
-0.775788419966582,
-0.7400872167978756,
-0.10578357492485335,
0.30287010067847436,
-1.2127058770179304,
-0.6750567678010801,
0.3341434318919877,
1.8336516507046157,
1.105410842250908,
-0.7711783703442725,
-0.20834347267477862,
-0.06315849766945486,
0.003016997583954831,
-1.0500016329150343,
-0.9168020284223636,
0.306128397266698,
1.0980602112281863,
-0.10465519493772572,
0.4557797534454941,
-0.2524452955086468,
-1.6176089110359837,
0.46251282530754667,
0.45751208998354903,
0.4222844954971609,
0.9651098606162691,
-0.1364401431697167,
-0.4988616288584964,
-0.29549238375582904,
0.6950204582392359,
0.2975369992016046,
-1.0159498719807218,
1.3704532401348395,
1.1210419577766673,
1.2051869452003332,
0.10749349867353084,
-3.1876892257116562,
1.316240976262548,
-1.3777452919511493,
-1.0666211985935259,
1.605446695828751,
-0.39682821266996865,
-0.2828059717857655,
1.30488698803017,
-2.116606225467923,
-0.2026680301462151,
-0.05504008273574069,
-0.028520163428411835,
0.4424105678123449,
-0.3427628263418371,
0.23805293411919937,
-0.7515414823259695,
-0.1272505897548366,
1.803348436304099,
-2.0178252709022124,
0.4860300090112474,
1.2304054166426217,
0.7228668982068365,
1.7400607500575112,
0.3480274098246697,
-0.3887978895385282,
-1.6511926233909175,
0.14517929503564567,
-1.1599010576123796,
-0.016133552438119002,
0.47157644883706273,
0.27657785075518254,
1.4464286976282463,
-1.2605489185634533,
-1.2548765025615338,
0.0755319579826929,
1.0476733637516833,
-0.7038690219524807,
-0.9580696842862921,
-0.18135657098008018,
-0.18163993379314564,
0.4092798531146971,
-2.049808182546896,
-1.2447062617916826,
-1.6681140306283337,
1.0709944517933483,
-0.7059385234342846,
-0.8033587669003331,
-1.8152275905903312,
0.11729996097670137,
2.2994900038012376,
-0.1291192451734159,
-0.6731565869164164,
-0.06690994571366346,
-0.40330072968473235,
-0.23927186025094221,
2.7756216937096676,
0.06441299443146056,
-0.5095247173507204,
-0.5228853558871007,
0.806629654091097,
-2.110096084114651,
-0.1233374136509439,
-1.021178519845751,
0.058906278340351045,
-0.26316852406211017,
-1.2990807244026237,
-0.1937986598084067,
0.3909222793445317,
0.578027315076297,
-0.11837271520846208,
-1.134297652720464,
0.496915417153268,
-0.5315184110418045,
0.5284176849952198,
-1.6810338988102331,
0.41220454054009154,
1.0554031136792,
-1.4222775023918832,
-1.1664353586956209,
0.018952180522661358,
-0.04620616876577671,
-0.8446292647938418,
-0.6889432180332509,
-0.16012081070647954,
0.5680940644754282,
-1.9792941921407943,
0.35441842206114726,
0.12433268557499534,
0.25366905921805377,
0.6262297786892028,
1.327981424671081,
1.774834324890265,
-0.9725604763128438,
0.42824027889428,
0.19725541390327114,
1.4640606982992412,
1.6484993842838995,
0.009848260786412894,
-2.318740403198263,
-0.4125245127403577,
-0.15500831770388285,
1.010740123094443,
0.7509498708766653,
-0.021415407776108144,
0.6466776546788641,
-1.421096837521404,
0.5632248951325018,
-1.230539161899903,
-0.26766333435961503,
-1.7208241092827994,
-1.068122926814994,
-1.6339248620455546,
0.07225436117508208,
-1.2018233250224348,
-0.07213000691963527,
-1.0080992229563746,
-1.151378048476321,
-0.2660104149809121,
1.6307779136408695,
0.8394822016824073,
-0.23362802143120032,
-0.36799502320054384,
0.35359852278856263,
0.5830948999779656,
-0.730683771776052,
1.4715728371820667,
-1.0668090648998136,
-1.025762014881618,
0.21056106958224155,
-0.5141254207774576,
-0.1592942838690149,
0.7688711617969363,
-2.464535892598544,
-0.33306989349452987,
0.9457207224940593,
0.36108072442574435,
-0.6490066877470516,
-0.8714147266896871,
0.6567118414749348,
-0.18543305444915045,
0.11156511615955596,
0.7299392157186994,
-0.9902398239693843,
-1.3231344439063761,
-1.1402773433114928,
0.3696183719476138,
-1.0512718152423168,
-0.6093518314203102,
0.0010622538704462257,
-0.17676306948277776,
-0.6291120128576891,
1.6390197341434742,
-0.8105788162716191,
-2.0105672384392204,
-0.7909143328024505,
-0.10510684692203587,
-0.013384480496840259,
0.37683659744804815,
-0.15123337965442354,
1.8427651248902048,
1.0371006855495906,
0.29198928612503655,
-1.7455852392709181,
1.0854545339796853,
1.8156620972829793,
1.2399563224061596,
1.1196530775769857,
0.4349954478175989,
0.11093680938321168,
0.9945934589378227,
-0.5779739742428905,
1.0398502505219054,
-0.09401160691650227,
0.22793239636661505,
-1.8664992140331715,
-0.16104499274010126,
-0.8497511318264537,
-0.005035074822415585,
-1.7956896952184151,
1.8304783101189757,
0.19094408763231646,
1.3353023874309002,
0.5889134606052353,
-0.48487660139277866,
0.4817014755127622,
1.5981632863770983,
2.1416849775567943,
-0.5524061711669017,
0.3364804821524787,
-0.8609687548167294,
0.24548635047971906,
-0.1281468603588133,
-0.03871410517044196,
-0.2678174852638268,
0.41800607312114096,
-0.2503930647517959,
0.8432391494945226,
-0.5684563173706987,
-0.6737077809046504,
2.0559579098493606,
-0.29098826888414253,
-0.08572747304559661,
-0.301857666880195,
-0.3446199959065524,
0.7391340848217359,
-0.3087136212446006,
0.5245553707204758,
-3.063281336805349,
0.47471623010413705,
0.3733427291759615,
-0.26216851429591426,
-0.5433523111756248,
0.3305385199964823,
-1.4866150542941634,
-0.4699911958560942,
0.7312367186673805,
-0.22346998944216903,
-0.4102860865811592,
-0.3003478250288424,
-0.3436168605845268,
0.9456524589400904,
-0.03710285453384255,
0.10330609878001526,
0.6919858329179392,
0.8673477607085118,
0.380742577915601,
0.5785785515837437,
-0.011421905830097267,
0.587187810965595,
-1.172536467775141,
-0.532086162097372,
-0.34440413367820183,
-1.404900386188497,
-0.1916375229779241,
1.6910999461291834,
-0.6070351182769795,
-0.8371447893868493,
0.8853944070432224,
1.4062946075925473,
-0.4575973141608374,
1.1458755768004445,
0.2619874618238163,
1.7105876844856704,
-1.3938976454537522,
-0.11403217166441704,
-1.0354305240085717,
-0.4285770475062154,
0.10326635421187867,
0.6911853442971228,
0.6293835213179542,
-0.819693698713199,
-0.7378190403744175,
-1.495947672573938,
-1.2406693914431872,
-1.0486341638186725,
-1.3715759883075953,
3.585407817418151,
-0.8007079372574223,
-1.527336776754733,
-0.4716571043072485,
-0.6967311271405545,
1.0003347462169225,
-0.30569565002022697,
0.3646134876772732,
0.49083033603832493,
0.07754580794955847,
-0.13467337850920083,
0.02134473458605164,
0.5025183900540823,
-0.940929087894874,
1.441600637127558,
-0.0857298131221344,
-0.575175243519591,
0.42622029657630595,
-0.3239674701415489,
0.22648849821602596,
-0.6636465305318631,
0.30415000329164754,
-0.6170241274574016,
0.07578674772163065,
0.2952841441615124,
0.8120317689468056,
-0.46861353019671337,
0.04718559572470416,
-0.3105660017232523,
-0.28898463203535724,
0.9575298065734561,
-0.1977556031830993,
0.009658232624257272,
1.1432743259603295,
-1.8989396918936858,
0.20787070770386357,
1.4256750543782999,
-0.03838329973778874,
-0.9051229357470373,
-1.2002277085489457,
2.405569956130733,
1.895817948326675,
-0.8260858325924574,
0.5759061866255807,
2.7022875569683342,
1.0591327405967745,
0.21449833798124354,
0.19970388388081273,
0.018242139911433558,
-0.630960146999549,
-2.389646042147776,
0.5424304992480339,
-1.2159551561948718,
-1.6851632640204128,
-0.4812221268109694,
0.6217652794219579,
-0.380139431677482,
-0.2643524783321051,
0.5106648694993016,
-0.895602157034141,
-0.20559568725141816,
1.5449271875734911,
1.544075783565114,
0.17877619857826843,
1.9729717339967108,
0.8302033109816261,
-0.39118561199170965,
-0.4428357598297098,
-0.02550407946753186,
-1.0202977138210447,
2.6604654314300835,
1.9163029269361842,
0.34697436596877657,
-0.8078124769022497,
-1.3876596649099957,
0.44707250163663864,
-0.6752837232272447,
-0.851291770954755,
0.7599767868730256,
0.8134109401706875,
-1.6766750539980289,
-0.06051832829232975,
-0.4652931327216134,
-0.9249124398287735,
1.9022739762222731,
1.7632300613807597,
1.675335012283785,
0.47529854476887495,
-0.7892463423254658,
0.3910120652706098,
0.5812432547936405,
0.2693084649672777,
-0.08138564925779349,
0.9150619269526952,
-0.8637356349272142,
-0.14137853834901817,
-0.20192754829896423,
0.04718228147088756,
-0.9743600144318,
-0.9936290943927825,
0.3544612180477054,
0.6839546770735121,
1.5089070357620178,
1.301167565172228,
-1.5396145667672985,
0.42854366341485456,
-1.5876582617301032,
-0.0316985879141714,
0.3144220016570915,
-0.05054766725644431,
0.2934139006870167,
0.11396170275994542,
-0.6472140129693643,
1.6556030742445431,
1.0319410208453506,
0.3292217603989991,
-0.058758121958605435,
-0.19917171648476298,
-0.5192866115874029,
0.1997510689920335,
-1.3675686656161756,
-1.7761517497832053,
-0.11260276070167097,
0.9717892642758689,
0.0840815981843948,
-0.40211265381258554,
0.27384496844034517,
-1.0403875081272367,
1.2884781173493884,
-1.8066239592554476,
1.1136979156298865,
-0.06223155785690416,
1.3930381289015936,
0.4586305673655182,
1.3159249757827194,
-0.5369892835955705,
0.17827408233621184,
0.22693934439969682,
0.8216240002114816,
-1.0422409752281838,
0.3329686606709231,
-1.5128804353968217,
1.0323052869815534,
1.1640486934424354,
1.6450118078345612,
-0.6717687395070293,
-0.08135119186406627,
1.2746921873544188,
-0.8255794145095643,
0.7123504776564864,
0.6953336934741682,
2.191382322698439,
1.4155790749261592,
2.4681081786912866,
-2.2904357033803815,
-0.8375155191566624,
1.1040106662196736,
0.7084133268872015,
-3.401968681942055,
0.23237090512844757,
1.1199436238058174,
0.6333916486592628,
-0.6012340913121055,
-0.3693951838866523,
-1.7742670566875682,
-0.36431378282545124,
-0.4042586409194551,
-0.04648644034604476,
1.5138191613743486,
-0.2053670782251071,
1.8679122383251414,
0.8355881018692999,
-0.5369705129279005,
-0.7909355080370954,
2.1080036780007987,
0.019537331188020687,
-1.4672982688640615,
-1.486842866467901,
-1.1036839537574874,
1.0800858540685894,
-0.2313974176207594,
0.47763272078271807,
-1.9196070490691473,
-0.8193535127855751,
-0.6853651905832031,
-0.18272370464882973,
-0.33413577684633056,
2.2261342671906106,
1.6853726343573683,
0.8563421109235769,
1.0468799885096596,
0.12189082561416206,
-1.3596466927672854,
-0.7607432068282968,
0.7061728288620306,
-0.4384478018639071,
0.8620104661898899,
1.04258758121448,
-1.1464159128515612,
0.9617945424413628,
0.04987102831355013,
-0.8472878887606543,
0.32986774370339184,
1.278319839581162,
-0.4040926804592034,
-0.6691567800662129,
0.9415431940597389,
0.3974846022291844,
-0.8425204662387112,
-1.506166868030291,
-0.04248497940038203,
0.26434168799067986,
-1.5698380163561454,
-0.6651727917714935,
1.2400220571204048,
-0.1251830593977037,
0.6156254221302833,
0.43585628657139575,
-1.6014619037611209,
1.9152323656075512,
-0.8847911114213622,
1.359854519784993,
-0.5554989575409871,
0.25064804193232354,
0.7976616257678464,
0.37834567410982123,
-0.6300374359617635,
-1.0613465068052854,
-0.866474302027355,
1.2458556977164312,
0.577814049080149,
2.069400463823993,
0.9068690176961165,
-0.5031387968484738,
-0.3640749863516844,
-1.041502465417534,
0.6732994659644133,
-0.006355018868252906,
-0.3650517541386253,
1.0975063446734974,
-2.203726812834859,
1.060685913143899,
-0.4618706570892267,
0.06475263817517128,
-0.19326357638969882,
-0.01812119454736379,
0.1337618009668529,
1.1838276997792907,
0.4273677345455913,
-0.4912341608307858,
0.2349993979417651,
0.9566260826411601,
-0.7948243131958422,
-0.6168334352331588,
0.3369425926447926,
0.8547756445246633,
0.2666330662219728,
2.431868771129661,
1.0089732701876513,
-0.1162341515974066,
-1.1746306816795218,
-0.08227639025627424,
0.794676385688044,
0.15005011094018297,
-0.8763821573601055,
-1.0811684990769739,
0.6311588092267179,
0.026124278982220386,
0.8306502001533514,
1.0856487813261877,
-0.018702855899823106,
-0.07338137135247896,
-0.8435746484744243,
-0.18091216366556986,
0.2295807891528797,
-1.0689295774443397,
-1.5621175533013612,
1.3314045672598216,
0.6211561903553582,
1.0479302317100871,
-1.1509436982013124,
0.447985084931758,
0.19917261474342404,
0.3582887259341301,
0.9953552868908098,
0.8948165434511316,
0.4949033431999123,
-0.23004847985703908,
0.6411581535557106,
-1.1589671573242186,
-0.13691519182560624,
-0.8849560872785238,
0.6629182075027006,
2.2608150731789696,
2.2823614453180294,
-1.2291376923498247,
-0.9267975556981378,
0.2597417839242135,
-0.7667310491821938,
0.10503294084132372,
2.960320355577672,
-1.0645098483081497,
-1.2888339889815872,
-0.6564570556444346,
0.4742489396354781,
0.8879606773334898,
-0.6477585196839569,
-0.7309497810668936,
1.7025953934976548,
0.1789174966941155,
-0.4839093362740933,
-0.8917713440107442,
1.4521776747175792,
-0.1676974219641624,
-0.500672037099228,
-0.2947747621553442,
0.929636971325952,
-0.7614935150071248,
1.6886298813725842,
-0.8136217834373227,
1.2030997228178093,
1.382267485738376,
2.594387458306705,
-0.7703668776292266,
-0.7642584795112598,
1.3356598324609947,
-0.5745269784148925,
-2.212092904499444,
-1.727975556661197,
-0.18543087256023608,
-0.10167435635752538,
1.3480966068787303,
0.0142803272337873,
-0.480077631815393,
-0.32270216749876185,
-1.7884435311074431,
-0.5695640948971382,
-0.22859087912027687,
-0.08783386938029487,
-0.18151955278624396,
0.2031493507095467,
0.06444304447669409,
-0.4339138073294572,
0.236563959074551,
-0.2937958719187449,
0.1611232843821199,
-0.6574871644742827,
1.3141902865107886,
0.6093649138398077,
0.056674985715912514,
-1.828714441504608,
-0.46768482587669535,
0.6489735384886999,
0.5035677725398181,
-0.887590772676158,
-0.3222316759913631,
-0.35172770495027483,
-0.4329205472963193,
-0.8449916868048998,
0.38282765028957993,
1.3171924061732359,
0.2956667124648384,
0.5390909497681301,
-0.7591989862253667,
-1.1520792974885883,
-0.39344757869384944,
0.6192677330177175,
-0.05578834574542242,
0.593015990282657,
0.9374465229256678,
0.647772562443425,
1.1071167572595217,
-1.3015016617832518,
1.267300472456379,
-0.5807673178649629,
0.9343468385348384,
-0.28554893036513673,
0.4487573993840033,
0.6749018890520516,
-1.20482985206765,
0.17291806504654686,
-0.4124576407610529,
-0.9203236505429044,
-0.7461342369802754,
-0.19694162321688435,
0.46556512963300906,
0.5198366004764268,
-1.7222561645076129,
-0.7078891617994071,
-1.1653209054214695,
1.5560964971092122,
0.3335520152642012,
0.008390825910327906,
0.11336719644324977,
0.3158913817073965,
0.4704483453862008,
-0.5700583482495889,
-1.276634964816531,
-1.7880560933777756,
-0.26514994709973827,
0.6194447367446946,
-0.654762456435761,
1.0621929196158544,
0.4454719444987052,
-0.9323145612076791,
1.3197357985874438,
-0.8792938558447049,
-0.2470423905508279,
0.5128954444799875,
-0.09202044992462606,
-1.3082892596744382,
-0.34428948138804927,
0.012422196356164879,
1.4626152292162142,
0.34678216997159833,
0.409462409138861,
0.32838364873801185,
1.8776849459782967,
1.6816627852133539,
-0.24894138693568296,
0.7150105850753732,
0.22929306929129853,
-0.21434910504054566,
1.3339497173912471,
-1.2497042452057836,
-0.04487255356399775,
-0.6486304639082145,
-0.8048044333264733,
-1.8090170501469942,
1.481689285694336,
-1.4772553200884717,
-0.36792462539303805,
-1.103508260812736,
-0.2135236993720317,
0.40889179796540165,
1.993585196733386,
0.43879096427562897,
-0.44512875171982147,
-1.1780830020629518,
-1.666001035275436,
-0.2977294957665528,
1.7299614542270356,
0.9882265798853356,
2.2412430815464597,
0.5801434875813244,
-0.739190619909163,
-1.2663490594895201,
0.5735521649879137,
1.2105709455012765,
1.9112159951415644,
-2.259218931706201,
-0.563310876529377,
-2.4119185903750493,
0.9662624485722368,
-0.22788851242764951,
0.9198283887420099,
0.7855927065251492,
-0.7459868094792474,
0.10543289218409971,
0.6401750224618271,
-0.0077375118689326705,
-0.11647036625911977,
-0.4722391874001602,
-0.2718425102733572,
-0.8796746964457087,
0.6112903638894259,
0.5347851929096421,
-0.4749419210717794,
1.0633720764557604,
-0.2590556665572949,
2.590182301241823,
1.4524061372706638,
-0.8503733047335056,
0.5609357391481067,
-1.5661825434426477,
0.8019667474525984,
1.2716795425969496,
0.20011166646917924,
-0.7105405282282679,
-0.5593129072748189,
-1.2401371010520867,
-0.7002520937780202,
-2.236596391787529,
-1.8130090502823886,
-0.23990633860801777,
1.7428780878151378,
1.4661206538178901,
-0.8678567353744017,
0.2957423562639015,
0.13935419069962593,
1.399598845123674,
0.059729544605779575,
-0.9607778026198247,
0.18474907798482051,
1.0117193651915666,
-0.9173540069396245,
0.8934765521365161,
-0.665655291396948,
-0.32955768273493324,
0.3062873812209283,
0.177342106982554,
0.3595522704599547,
-1.5964209653110262,
0.6705899137346863,
-1.1034642863469553,
-1.0029562484065524,
0.10622956543479244,
0.4261871936541378,
0.7777501694354336,
-0.806235923997437,
-0.8272801398172428,
-1.2783440745845536,
0.5982979227669168,
-0.28214494859284556,
1.101560367699546,
-0.14008021262664466,
-0.38717961692054237,
0.9962925044431369,
-0.7391490127960976,
-0.06294945881724459,
0.7283671247384875,
-0.8458895297768138,
0.22808829204347086,
0.43685668023014523,
0.9204095286935638,
-0.028241645704951284,
0.15951784765135396,
0.8068984900818966,
-0.34387965576978663,
0.573828962760762,
-0.13374515460012618,
-0.5552788325377814,
0.5644705833909952,
-0.7500532220469983,
0.33436674493862256,
-0.8595435026628129,
-0.38943898244735853,
0.6401502590131951,
-1.2968645995363652,
0.5861622311675501,
0.2311759458689689,
0.10962292708600496,
-0.26025023584932205,
-0.5398478003611565,
-1.0514168636922954,
1.2689172189127857,
1.7029909647408918,
-0.02325431623491577,
-0.3064675950620902,
-1.5816446841009473,
0.6874254059433739,
0.7755967316475798,
1.4119333324396597,
0.14198739135512406,
0.2927714469848192,
-0.7239793888399496,
0.3506448783535265,
-0.7568480706640158,
-1.2158508387501554,
0.22197589131086445,
-0.5621415304506887,
-1.2381112050191665,
-1.917208333033256,
-0.3321665793941188,
-0.5916951886991071,
-1.244826507645294,
-0.29767661008214463,
0.8590635852032509,
-1.8579290298421591,
-1.0470546224962876,
-2.540080936704841,
0.5458326769958273,
0.042222128206941614,
0.6080450228346708,
0.6542717901662132,
-1.7292955132690793,
-0.4793123354077725,
0.7341767020417185,
-1.3322222208234826,
-0.5076389542432337,
0.684399163420284,
0.3948487980667425,
-1.7919279627150193,
1.582925890933478,
0.8341846456063038,
0.11776890377042544,
1.7471239793853526,
1.2269451783893597,
0.4235463733287474,
1.5908284320029056,
-1.635191535538596,
0.04419903330064594,
-1.264385360373252,
0.5370192519783876,
1.2368603501240771,
-0.9241079150337286,
-0.3428051342915208,
0.0882286441353256,
-2.210824604513402,
-1.9000343283757128,
0.4633735273417207,
-0.32534396967175094,
0.026187836765356437,
0.18253601230609245,
0.8519745761039671,
-0.028225375482784816,
-0.5114197447067229,
-1.2428743809444227,
0.2879711400745508,
1.2857130031108321,
0.5296743558975853,
-0.8440551904275335,
-1.3776032491368861,
1.8164028526343798,
-1.1422045767986222,
-1.8675179752970443,
0.6969635320800454,
0.9444010906414336,
-1.28197913481747,
-0.06259132322304235,
-0.4518754825442558,
0.9183188639099813,
-0.2916931407869574,
-1.1464007469977915,
-0.4475136941593681,
0.44385573868752803,
2.1606711638680762,
-1.4813603018181851,
-0.5647618024870872,
-1.474746204557383,
-2.9067748098220485,
0.06132111635940877,
-0.09663310829361334,
-1.087053744976143,
-1.774855117659402,
0.8130120568830074,
-0.5179279676199186,
-0.32549430825787784,
-1.1995838271705979,
0.8587480835176114,
-0.02095126282663596,
0.6677898019388228,
-1.1891003375304232,
-2.1125937754631305,
-0.047765192715672734,
0.09812525010300294,
-1.034992359189106,
1.0213451864081846,
1.0788796513160641,
-1.444469239557739,
0.28341828947950637,
-2.4556013891966737,
1.7126080715698266,
-0.5943068899412715,
1.0897594994215383,
-0.16345461884651272,
0.7027032523865234,
2.2851158088542562,
0.5038100496225458,
-0.16724173993999966,
-0.6747457076421414,
0.42254684460738184,
1.277203836895222,
-0.34438446183574595,
0.38956738377878264,
-0.26884968654334923,
-0.02148772950361766,
0.02044885235644607,
-1.3873669828232345,
0.19995968746809226,
-1.5826859815811556,
-0.20385119370067947,
0.5724329589281247,
-1.330307658319185,
0.7756101314358208,
-0.4989071461473931,
0.5388161769427321,
-0.9811085284266614,
2.335331094403556,
-0.5588657325211347,
-1.2850853695283377,
0.40092993245913744,
-1.9675685522110529,
0.9378938542456674,
-0.18645815013912917,
-0.6828273180353106,
-1.840122530632185,
-1.2581798109361761,
0.2867275394896832,
],
}
return data
@pytest.fixture
def numeric_high_card_dataset(test_backend, numeric_high_card_dict):
schemas = {
"pandas": {"norm_0_1": "float64",},
"postgresql": {
# "norm_0_1": "DOUBLE_PRECISION",
"norm_0_1": "NUMERIC",
},
"sqlite": {"norm_0_1": "FLOAT",},
"mysql": {"norm_0_1": "FLOAT",},
"mssql": {"norm_0_1": "FLOAT",},
"spark": {"norm_0_1": "FloatType",},
}
return get_dataset(test_backend, numeric_high_card_dict, schemas=schemas)
@pytest.fixture
def datetime_dataset(test_backend):
data = {
"datetime": [
str(datetime.datetime(2020, 2, 4, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 5, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 6, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 7, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 8, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 9, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 10, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 11, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 12, 22, 12, 5, 943152)),
str(datetime.datetime(2020, 2, 13, 22, 12, 5, 943152)),
]
}
schemas = {
"pandas": {"datetime": "datetime64",},
"postgresql": {"datetime": "TIMESTAMP",},
"sqlite": {"datetime": "TIMESTAMP",},
"mysql": {"datetime": "TIMESTAMP",},
"mssql": {"datetime": "DATETIME",},
"spark": {"datetime": "TimestampType",},
}
return get_dataset(test_backend, data, schemas=schemas)
@pytest.fixture
def non_numeric_low_card_dataset(test_backend):
"""Provide dataset fixtures that have special values and/or are otherwise useful outside
the standard json testing framework"""
data = {
"lowcardnonnum": [
"a",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
"b",
]
}
schemas = {
"pandas": {"lowcardnonnum": "str",},
"postgresql": {"lowcardnonnum": "TEXT",},
"sqlite": {"lowcardnonnum": "VARCHAR",},
"mysql": {"lowcardnonnum": "TEXT",},
"mssql": {"lowcardnonnum": "VARCHAR",},
"spark": {"lowcardnonnum": "StringType",},
}
return get_dataset(test_backend, data, schemas=schemas)
@pytest.fixture
def non_numeric_high_card_dataset(test_backend):
"""Provide dataset fixtures that have special values and/or are otherwise useful outside
the standard json testing framework"""
data = {
"highcardnonnum": [
"CZVYSnQhHhoti8mQ66XbDuIjE5FMeIHb",
"cPWAg2MJjh8fkRRU1B9aD8vWq3P8KTxJ",
"4tehKwWiCDpuOmTPRYYqTqM7TvEa8Zi7",
"ZvlAnCGiGfkKgQoNrhnnyrjmU7sLsUZz",
"AaqMhdYukVdexTk6LlWvzXYXTp5upPuf",
"ZSKmXUB35K14khHGyjYtuCHuI8yeM7yR",
"F1cwKp4HsCN2s2kXQGR5RUa3WAcibCq2",
"coaX8bSHoVZ8FP8SuQ57SFbrvpRHcibq",
"3IzmbSJF525qtn7O4AvfKONnz7eFgnyU",
"gLCtw7435gaR532PNFVCtvk14lNJpZXv",
"hNyjMYZkVlOKRjhg8cKymU5Bvnh0MK5R",
"IqKC2auGTNehP8y24HzDQOdt9oysgFyx",
"TePy034aBKlNeAmcJmKJ4p1yF7EUYEOg",
"cIfDv6ieTAobe84P84InzDKrJrccmqbq",
"m1979gfI6lVF9ijJA245bchYFd1EaMap",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"7wcR161jyKYhFLEZkhFqSXLwXW46I5x8",
"IpmNsUFgbbVnL0ljJZOBHnTV0FKARwSn",
"hsA4btHJg6Gq1jwOuOc3pl2UPB5QUwZg",
"vwZyG0jGUys3HQdUiOocIbzhUdUugwKX",
"rTc9h94WjOXN5Wg40DyatFEFfp9mgWj6",
"p1f20s14ZJGUTIBUNeBmJEkWKlwoyqjA",
"VzgAIYNKHA0APN0oZtzMAfmbCzJenswy",
"IO7BqR3iS136YMlLCEo6W3jKNOVJIlLG",
"eTEyhiRuyEcTnHThi1W6yi1mxUjq8TEp",
"4OHPKQgk3sPPYpKWcEWUtNZ0jv00UuPU",
"ZJCstyyUvTR2gwSM6FLgkXYDwG54qo8u",
"nGQsvDAzuL5Yc2XpqoG5P7RhpiTpJp8H",
"NfX4KfEompMbbKloFq8NQpdXtk5PjaPe",
"CP22IFHDX1maoSjTEdtBfrMHWQKACGDB",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hGwZQW7ao9HqNV2xAovuMBdyafNDE8q6",
"OJmDHbqP1wzarsaSwCphsqvdy5SnTQMT",
"JQbXIcgwUhttfPIGB7VGGfL2KiElabrO",
"eTTNDggfPpRC22SEVNo9W0BPEWO4Cr57",
"GW2JuUJmuCebia7RUiCNl2BTjukIzZWj",
"oVFAvQEKmRTLBqdCuPoJNvzPvQ7UArWC",
"zeMHFFKLr5j4DIFxRQ7jHWCMClrP3LmJ",
"eECcArV5TZRftL6ZWaUDO6D2l3HiZj1Y",
"xLNJXaCkOLrD6E0kgGaFOFwctNXjrd77",
"1f8KOCkOvehXYvN8PKv1Ch6dzOjRAr01",
"uVF6HJgjVmoipK1sEpVOFJYuv2TXXsOG",
"agIk8H2nFa0K27IFr0VM2RNp6saihYI3",
"cAUnysbb8SBLSTr0H7cA1fmnpaL80e0N",
"fM1IzD5USx4lMYi6bqPCEZjd2aP7G9vv",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"i65d8jqET5FsVw9t5BwAvBjkEJI6eUMj",
"HbT1b7DQL7n7ZEt2FsKHIggycT1XIYd8",
"938eC0iGMSqZNlqgDNG9YVE7t4izO2Ev",
"PyZetp4izgE4ymPcUXyImF5mm7I6zbta",
"FaXA6YSUrvSnW7quAimLqQMNrU1Dxyjs",
"PisVMvI9RsqQw21B7qYcKkRo5c8C2AKd",
"eSQIxFqyYVf55UMzMEZrotPO74i3Sh03",
"2b74DhJ6YFHrAkrjK4tvvKkYUKll44bR",
"3svDRnrELyAsC69Phpnl2Os89856tFBJ",
"ZcSGN9YYNHnHjUp0SktWoZI7JDmvRTTN",
"m9eDkZ5oZEOFP3HUfaZEirecv2UhQ1B1",
"wZTwJmMX5Q58DhDdmScdigTSyUUC04sO",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"KAuFgcmRKQPIIqGMAQQPfjyC1VXt40vs",
"0S4iueoqKNjvS55O57BdY3DbfwhIDwKc",
"ywbQfOLkvXEUzZISZp1cpwCenrrNPjfF",
"Mayxk8JkV3Z6aROtnsKyqwVK5exiJa8i",
"pXqIRP5fQzbDtj1xFqgJey6dyFOJ1YiU",
"6Ba6RSM56x4MIaJ2wChQ3trBVOw1SWGM",
"puqzOpRJyNVAwH2vLjVCL3uuggxO5aoB",
"jOI4E43wA3lYBWbV0nMxqix885Tye1Pf",
"YgTTYpRDrxU1dMKZeVHYzYNovH2mWGB7",
"24yYfUg1ATvfI1PW79ytsEqHWJHI69wQ",
"mS2AVcLFp6i36sX7yAUrdfM0g0RB2X4D",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ItvI4l02oAIZEd5cPtDf4OnyBazji0PL",
"DW4oLNP49MNNENFoFf7jDTI04xdvCiWg",
"vrOZrkAS9MCGOqzhCv4cmr5AGddVBShU",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"R74JT4EEhh3Xeu5tbx8bZFkXZRhx6HUn",
"bd9yxS6b1QrKXuT4irY4kpjSyLmKZmx6",
"UMdFQNSiJZtLK3jxBETZrINDKcRqRd0c",
"He7xIY2BMNZ7vSO47KfKoYskVJeeedI7",
"G8PqO0ADoKfDPsMT1K0uOrYf1AtwlTSR",
"hqfmEBNCA7qgntcQVqB7beBt0hB7eaxF",
"mlYdlfei13P6JrT7ZbSZdsudhE24aPYr",
"gUTUoH9LycaItbwLZkK9qf0xbRDgOMN4",
"xw3AuIPyHYq59Qbo5QkQnECSqd2UCvLo",
"kbfzRyRqGZ9WvmTdYKDjyds6EK4fYCyx",
"7AOZ3o2egl6aU1zOrS8CVwXYZMI8NTPg",
"Wkh43H7t95kRb9oOMjTSqC7163SrI4rU",
"x586wCHsLsOaXl3F9cYeaROwdFc2pbU1",
"oOd7GdoPn4qqfAeFj2Z3ddyFdmkuPznh",
"suns0vGgaMzasYpwDEEof2Ktovy0o4os",
"of6W1csCTCBMBXli4a6cEmGZ9EFIOFRC",
"mmTiWVje9SotwPgmRxrGrNeI9DssAaCj",
"pIX0vhOzql5c6Z6NpLbzc8MvYiONyT54",
"nvyCo3MkIK4tS6rkuL4Yw1RgGKwhm4c2",
"prQGAOvQbB8fQIrp8xaLXmGwcxDcCnqt",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"mty9rQJBeTsBQ7ra8vWRbBaWulzhWRSG",
"JL38Vw7yERPC4gBplBaixlbpDg8V7gC6",
"MylTvGl5L1tzosEcgGCQPjIRN6bCUwtI",
"hmr0LNyYObqe5sURs408IhRb50Lnek5K",
"CZVYSnQhHhoti8mQ66XbDuIjE5FMeIHb",
"cPWAg2MJjh8fkRRU1B9aD8vWq3P8KTxJ",
"4tehKwWiCDpuOmTPRYYqTqM7TvEa8Zi7",
"ZvlAnCGiGfkKgQoNrhnnyrjmU7sLsUZz",
"AaqMhdYukVdexTk6LlWvzXYXTp5upPuf",
"ZSKmXUB35K14khHGyjYtuCHuI8yeM7yR",
"F1cwKp4HsCN2s2kXQGR5RUa3WAcibCq2",
"coaX8bSHoVZ8FP8SuQ57SFbrvpRHcibq",
"3IzmbSJF525qtn7O4AvfKONnz7eFgnyU",
"gLCtw7435gaR532PNFVCtvk14lNJpZXv",
"hNyjMYZkVlOKRjhg8cKymU5Bvnh0MK5R",
"IqKC2auGTNehP8y24HzDQOdt9oysgFyx",
"TePy034aBKlNeAmcJmKJ4p1yF7EUYEOg",
"cIfDv6ieTAobe84P84InzDKrJrccmqbq",
"m1979gfI6lVF9ijJA245bchYFd1EaMap",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"7wcR161jyKYhFLEZkhFqSXLwXW46I5x8",
"IpmNsUFgbbVnL0ljJZOBHnTV0FKARwSn",
"hsA4btHJg6Gq1jwOuOc3pl2UPB5QUwZg",
"vwZyG0jGUys3HQdUiOocIbzhUdUugwKX",
"rTc9h94WjOXN5Wg40DyatFEFfp9mgWj6",
"p1f20s14ZJGUTIBUNeBmJEkWKlwoyqjA",
"VzgAIYNKHA0APN0oZtzMAfmbCzJenswy",
"IO7BqR3iS136YMlLCEo6W3jKNOVJIlLG",
"eTEyhiRuyEcTnHThi1W6yi1mxUjq8TEp",
"4OHPKQgk3sPPYpKWcEWUtNZ0jv00UuPU",
"ZJCstyyUvTR2gwSM6FLgkXYDwG54qo8u",
"nGQsvDAzuL5Yc2XpqoG5P7RhpiTpJp8H",
"NfX4KfEompMbbKloFq8NQpdXtk5PjaPe",
"CP22IFHDX1maoSjTEdtBfrMHWQKACGDB",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hGwZQW7ao9HqNV2xAovuMBdyafNDE8q6",
"OJmDHbqP1wzarsaSwCphsqvdy5SnTQMT",
"JQbXIcgwUhttfPIGB7VGGfL2KiElabrO",
"eTTNDggfPpRC22SEVNo9W0BPEWO4Cr57",
"GW2JuUJmuCebia7RUiCNl2BTjukIzZWj",
"oVFAvQEKmRTLBqdCuPoJNvzPvQ7UArWC",
"zeMHFFKLr5j4DIFxRQ7jHWCMClrP3LmJ",
"eECcArV5TZRftL6ZWaUDO6D2l3HiZj1Y",
"xLNJXaCkOLrD6E0kgGaFOFwctNXjrd77",
"1f8KOCkOvehXYvN8PKv1Ch6dzOjRAr01",
"uVF6HJgjVmoipK1sEpVOFJYuv2TXXsOG",
"agIk8H2nFa0K27IFr0VM2RNp6saihYI3",
"cAUnysbb8SBLSTr0H7cA1fmnpaL80e0N",
"fM1IzD5USx4lMYi6bqPCEZjd2aP7G9vv",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"i65d8jqET5FsVw9t5BwAvBjkEJI6eUMj",
"HbT1b7DQL7n7ZEt2FsKHIggycT1XIYd8",
"938eC0iGMSqZNlqgDNG9YVE7t4izO2Ev",
"PyZetp4izgE4ymPcUXyImF5mm7I6zbta",
"FaXA6YSUrvSnW7quAimLqQMNrU1Dxyjs",
"PisVMvI9RsqQw21B7qYcKkRo5c8C2AKd",
"eSQIxFqyYVf55UMzMEZrotPO74i3Sh03",
"2b74DhJ6YFHrAkrjK4tvvKkYUKll44bR",
"3svDRnrELyAsC69Phpnl2Os89856tFBJ",
"ZcSGN9YYNHnHjUp0SktWoZI7JDmvRTTN",
"m9eDkZ5oZEOFP3HUfaZEirecv2UhQ1B1",
"wZTwJmMX5Q58DhDdmScdigTSyUUC04sO",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"KAuFgcmRKQPIIqGMAQQPfjyC1VXt40vs",
"0S4iueoqKNjvS55O57BdY3DbfwhIDwKc",
"ywbQfOLkvXEUzZISZp1cpwCenrrNPjfF",
"Mayxk8JkV3Z6aROtnsKyqwVK5exiJa8i",
"pXqIRP5fQzbDtj1xFqgJey6dyFOJ1YiU",
"6Ba6RSM56x4MIaJ2wChQ3trBVOw1SWGM",
"puqzOpRJyNVAwH2vLjVCL3uuggxO5aoB",
"jOI4E43wA3lYBWbV0nMxqix885Tye1Pf",
"YgTTYpRDrxU1dMKZeVHYzYNovH2mWGB7",
"24yYfUg1ATvfI1PW79ytsEqHWJHI69wQ",
"mS2AVcLFp6i36sX7yAUrdfM0g0RB2X4D",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ItvI4l02oAIZEd5cPtDf4OnyBazji0PL",
"DW4oLNP49MNNENFoFf7jDTI04xdvCiWg",
"vrOZrkAS9MCGOqzhCv4cmr5AGddVBShU",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"R74JT4EEhh3Xeu5tbx8bZFkXZRhx6HUn",
"bd9yxS6b1QrKXuT4irY4kpjSyLmKZmx6",
"UMdFQNSiJZtLK3jxBETZrINDKcRqRd0c",
"He7xIY2BMNZ7vSO47KfKoYskVJeeedI7",
"G8PqO0ADoKfDPsMT1K0uOrYf1AtwlTSR",
"hqfmEBNCA7qgntcQVqB7beBt0hB7eaxF",
"mlYdlfei13P6JrT7ZbSZdsudhE24aPYr",
"gUTUoH9LycaItbwLZkK9qf0xbRDgOMN4",
"xw3AuIPyHYq59Qbo5QkQnECSqd2UCvLo",
"kbfzRyRqGZ9WvmTdYKDjyds6EK4fYCyx",
"7AOZ3o2egl6aU1zOrS8CVwXYZMI8NTPg",
"Wkh43H7t95kRb9oOMjTSqC7163SrI4rU",
"x586wCHsLsOaXl3F9cYeaROwdFc2pbU1",
"oOd7GdoPn4qqfAeFj2Z3ddyFdmkuPznh",
"suns0vGgaMzasYpwDEEof2Ktovy0o4os",
"of6W1csCTCBMBXli4a6cEmGZ9EFIOFRC",
"mmTiWVje9SotwPgmRxrGrNeI9DssAaCj",
"pIX0vhOzql5c6Z6NpLbzc8MvYiONyT54",
"nvyCo3MkIK4tS6rkuL4Yw1RgGKwhm4c2",
"prQGAOvQbB8fQIrp8xaLXmGwcxDcCnqt",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"mty9rQJBeTsBQ7ra8vWRbBaWulzhWRSG",
"JL38Vw7yERPC4gBplBaixlbpDg8V7gC6",
"MylTvGl5L1tzosEcgGCQPjIRN6bCUwtI",
"hmr0LNyYObqe5sURs408IhRb50Lnek5K",
],
# Built from highcardnonnum using the following:
# vals = pd.Series(data["highcardnonnum"])
# sample_vals = vals.sample(n=10, random_state=42)
# weights = np.random.RandomState(42).rand(10)
# weights = weights / np.sum(weights)
# new_vals = sample_vals.sample(n=200, weights=weights, replace=True, random_state=11)
"medcardnonnum": [
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"mS2AVcLFp6i36sX7yAUrdfM0g0RB2X4D",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"NfX4KfEompMbbKloFq8NQpdXtk5PjaPe",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"NfX4KfEompMbbKloFq8NQpdXtk5PjaPe",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NfX4KfEompMbbKloFq8NQpdXtk5PjaPe",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"NfX4KfEompMbbKloFq8NQpdXtk5PjaPe",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"hW0kFZ6ijfciJWN4vvgcFa6MWv8cTeVk",
"T7EUE54HUhyJ9Hnxv1pKY0Bmg42qiggP",
"NhTsracusfp5V6zVeWqLZnychDl7jjO4",
"k8B9KCXhaQb6Q82zFbAzOESAtDxK174J",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"2K8njWnvuq1u6tkzreNhxTEyO8PTeWer",
"ajcLVizD2vwZlmmGKyXYki03SWn7fnt3",
"oRnY5jDWFw2KZRYLh6ihFd021ggy4UxJ",
],
}
schemas = {
"pandas": {"highcardnonnum": "str", "medcardnonnum": "str",},
"postgresql": {"highcardnonnum": "TEXT", "medcardnonnum": "TEXT",},
"sqlite": {"highcardnonnum": "VARCHAR", "medcardnonnum": "VARCHAR",},
"mysql": {"highcardnonnum": "TEXT", "medcardnonnum": "TEXT",},
"mssql": {"highcardnonnum": "VARCHAR", "medcardnonnum": "VARCHAR",},
"spark": {"highcardnonnum": "StringType", "medcardnonnum": "StringType",},
}
return get_dataset(test_backend, data, schemas=schemas)
def dataset_sample_data(test_backend):
# No infinities for mysql
if test_backend == "mysql":
data = {
# "infinities": [-np.inf, -10, -np.pi, 0, np.pi, 10/2.2, np.inf],
"nulls": [np.nan, None, 0, 1.1, 2.2, 3.3, None],
"naturals": [1, 2, 3, 4, 5, 6, 7],
}
else:
data = {
"infinities": [-np.inf, -10, -np.pi, 0, np.pi, 10 / 2.2, np.inf],
"nulls": [np.nan, None, 0, 1.1, 2.2, 3.3, None],
"naturals": [1, 2, 3, 4, 5, 6, 7],
}
schemas = {
"pandas": {"infinities": "float64", "nulls": "float64", "naturals": "float64"},
"postgresql": {
"infinities": "DOUBLE_PRECISION",
"nulls": "DOUBLE_PRECISION",
"naturals": "NUMERIC",
},
"sqlite": {"infinities": "FLOAT", "nulls": "FLOAT", "naturals": "FLOAT"},
"mysql": {"nulls": "FLOAT", "naturals": "FLOAT"},
"mssql": {"infinities": "FLOAT", "nulls": "FLOAT", "naturals": "FLOAT"},
"spark": {
"infinities": "FloatType",
"nulls": "FloatType",
"naturals": "FloatType",
},
}
return data, schemas
@pytest.fixture
def dataset(test_backend):
"""Provide dataset fixtures that have special values and/or are otherwise useful outside
the standard json testing framework"""
data, schemas = dataset_sample_data(test_backend)
return get_dataset(test_backend, data, schemas=schemas)
@pytest.fixture
def pandas_dataset():
test_backend = "PandasDataset"
data, schemas = dataset_sample_data(test_backend)
return get_dataset(test_backend, data, schemas=schemas)
@pytest.fixture
def sqlalchemy_dataset(test_backends):
"""Provide dataset fixtures that have special values and/or are otherwise useful outside
the standard json testing framework"""
if "postgresql" in test_backends:
backend = "postgresql"
elif "sqlite" in test_backends:
backend = "sqlite"
else:
return
data = {
"infinities": [-np.inf, -10, -np.pi, 0, np.pi, 10 / 2.2, np.inf],
"nulls": [np.nan, None, 0, 1.1, 2.2, 3.3, None],
"naturals": [1, 2, 3, 4, 5, 6, 7],
}
schemas = {
"postgresql": {
"infinities": "DOUBLE_PRECISION",
"nulls": "DOUBLE_PRECISION",
"naturals": "DOUBLE_PRECISION",
},
"sqlite": {"infinities": "FLOAT", "nulls": "FLOAT", "naturals": "FLOAT"},
}
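    # profiler=None asks get_dataset to skip its default profiling pass; the
    # other fixtures here leave the helper's default profiler in place (an
    # assumption based on the helper's signature)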
return get_dataset(backend, data, schemas=schemas, profiler=None)
@pytest.fixture
def sqlitedb_engine(test_backend):
if test_backend == "sqlite":
import sqlalchemy as sa
return sa.create_engine("sqlite://")
else:
pytest.skip("Skipping test designed for sqlite on non-sqlite backend.")
@pytest.fixture
def postgresql_engine(test_backend):
if test_backend == "postgresql":
import sqlalchemy as sa
engine = sa.create_engine("postgresql://postgres@localhost/test_ci").connect()
yield engine
engine.close()
else:
pytest.skip("Skipping test designed for postgresql on non-postgresql backend.")
@pytest.fixture
def empty_data_context(tmp_path_factory):
project_path = str(tmp_path_factory.mktemp("empty_data_context"))
context = ge.data_context.DataContext.create(project_path)
context_path = os.path.join(project_path, "great_expectations")
asset_config_path = os.path.join(context_path, "expectations")
os.makedirs(asset_config_path, exist_ok=True)
return context
@pytest.fixture
def empty_context_with_checkpoint(empty_data_context):
context = empty_data_context
root_dir = empty_data_context.root_directory
fixture_name = "my_checkpoint.yml"
fixture_path = file_relative_path(
__file__, f"./data_context/fixtures/contexts/{fixture_name}"
)
checkpoints_file = os.path.join(root_dir, "checkpoints", fixture_name)
shutil.copy(fixture_path, checkpoints_file)
assert os.path.isfile(checkpoints_file)
return context
@pytest.fixture
def empty_context_with_checkpoint_stats_enabled(empty_data_context_stats_enabled):
context = empty_data_context_stats_enabled
root_dir = context.root_directory
fixture_name = "my_checkpoint.yml"
fixture_path = file_relative_path(
__file__, f"./data_context/fixtures/contexts/{fixture_name}"
)
checkpoints_file = os.path.join(root_dir, "checkpoints", fixture_name)
shutil.copy(fixture_path, checkpoints_file)
return context
@pytest.fixture
def empty_data_context_stats_enabled(tmp_path_factory, monkeypatch):
# Reenable GE_USAGE_STATS
monkeypatch.delenv("GE_USAGE_STATS")
project_path = str(tmp_path_factory.mktemp("empty_data_context"))
context = ge.data_context.DataContext.create(project_path)
context_path = os.path.join(project_path, "great_expectations")
asset_config_path = os.path.join(context_path, "expectations")
os.makedirs(asset_config_path, exist_ok=True)
return context
@pytest.fixture
def titanic_data_context(tmp_path_factory):
project_path = str(tmp_path_factory.mktemp("titanic_data_context"))
context_path = os.path.join(project_path, "great_expectations")
os.makedirs(os.path.join(context_path, "expectations"), exist_ok=True)
os.makedirs(os.path.join(context_path, "checkpoints"), exist_ok=True)
data_path = os.path.join(context_path, "../data")
    os.makedirs(data_path, exist_ok=True)
titanic_yml_path = file_relative_path(
__file__, "./test_fixtures/great_expectations_titanic.yml"
)
shutil.copy(
titanic_yml_path, str(os.path.join(context_path, "great_expectations.yml"))
)
titanic_csv_path = file_relative_path(__file__, "./test_sets/Titanic.csv")
shutil.copy(
titanic_csv_path, str(os.path.join(context_path, "../data/Titanic.csv"))
)
return ge.data_context.DataContext(context_path)
@pytest.fixture
def titanic_data_context_stats_enabled(tmp_path_factory, monkeypatch):
# Reenable GE_USAGE_STATS
monkeypatch.delenv("GE_USAGE_STATS")
project_path = str(tmp_path_factory.mktemp("titanic_data_context"))
context_path = os.path.join(project_path, "great_expectations")
os.makedirs(os.path.join(context_path, "expectations"), exist_ok=True)
os.makedirs(os.path.join(context_path, "checkpoints"), exist_ok=True)
data_path = os.path.join(context_path, "../data")
    os.makedirs(data_path, exist_ok=True)
titanic_yml_path = file_relative_path(
__file__, "./test_fixtures/great_expectations_titanic.yml"
)
shutil.copy(
titanic_yml_path, str(os.path.join(context_path, "great_expectations.yml"))
)
titanic_csv_path = file_relative_path(__file__, "./test_sets/Titanic.csv")
shutil.copy(
titanic_csv_path, str(os.path.join(context_path, "../data/Titanic.csv"))
)
return ge.data_context.DataContext(context_path)
@pytest.fixture
def titanic_sqlite_db(sa):
from sqlalchemy import create_engine
titanic_db_path = file_relative_path(__file__, "./test_sets/titanic.db")
engine = create_engine("sqlite:///{}".format(titanic_db_path))
assert engine.execute("select count(*) from titanic").fetchall()[0] == (1313,)
return engine
@pytest.fixture
def titanic_expectation_suite():
return ExpectationSuite(
expectation_suite_name="Titanic.warning",
meta={},
data_asset_type="Dataset",
expectations=[
ExpectationConfiguration(
expectation_type="expect_column_to_exist", kwargs={"column": "PClass"}
),
ExpectationConfiguration(
expectation_type="expect_column_values_to_not_be_null",
kwargs={"column": "Name"},
),
],
)
@pytest.fixture
def empty_sqlite_db(sa):
"""An empty in-memory sqlite db that always gets run."""
try:
from sqlalchemy import create_engine
engine = create_engine("sqlite://")
assert engine.execute("select 1").fetchall()[0] == (1,)
return engine
except ImportError:
raise ValueError("sqlite tests require sqlalchemy to be installed")
@pytest.fixture
@freeze_time("09/26/2019 13:42:41")
def site_builder_data_context_with_html_store_titanic_random(
tmp_path_factory, filesystem_csv_3
):
base_dir = str(tmp_path_factory.mktemp("project_dir"))
project_dir = os.path.join(base_dir, "project_path")
os.mkdir(project_dir)
os.makedirs(os.path.join(project_dir, "data"))
os.makedirs(os.path.join(project_dir, "data/titanic"))
shutil.copy(
file_relative_path(__file__, "./test_sets/Titanic.csv"),
str(os.path.join(project_dir, "data/titanic/Titanic.csv")),
)
os.makedirs(os.path.join(project_dir, "data/random"))
shutil.copy(
os.path.join(filesystem_csv_3, "f1.csv"),
str(os.path.join(project_dir, "data/random/f1.csv")),
)
shutil.copy(
os.path.join(filesystem_csv_3, "f2.csv"),
str(os.path.join(project_dir, "data/random/f2.csv")),
)
ge.data_context.DataContext.create(project_dir)
shutil.copy(
file_relative_path(
__file__, "./test_fixtures/great_expectations_site_builder.yml"
),
str(os.path.join(project_dir, "great_expectations", "great_expectations.yml")),
)
context = ge.data_context.DataContext(
context_root_dir=os.path.join(project_dir, "great_expectations")
)
context.add_datasource(
"titanic",
class_name="PandasDatasource",
batch_kwargs_generators={
"subdir_reader": {
"class_name": "SubdirReaderBatchKwargsGenerator",
"base_directory": os.path.join(project_dir, "data/titanic/"),
}
},
)
context.add_datasource(
"random",
class_name="PandasDatasource",
batch_kwargs_generators={
"subdir_reader": {
"class_name": "SubdirReaderBatchKwargsGenerator",
"base_directory": os.path.join(project_dir, "data/random/"),
}
},
)
context.profile_datasource("titanic")
context.profile_datasource("random")
context.profile_datasource(context.list_datasources()[0]["name"])
context._project_config.anonymous_usage_statistics = {
"enabled": True,
"data_context_id": "f43d4897-385f-4366-82b0-1a8eda2bf79c",
}
return context
@pytest.fixture
def titanic_multibatch_data_context(tmp_path_factory):
"""
Based on titanic_data_context, but with 2 identical batches of
data asset "titanic"
:param tmp_path_factory:
:return:
"""
project_path = str(tmp_path_factory.mktemp("titanic_data_context"))
context_path = os.path.join(project_path, "great_expectations")
os.makedirs(os.path.join(context_path, "expectations"), exist_ok=True)
data_path = os.path.join(context_path, "../data/titanic")
    os.makedirs(data_path, exist_ok=True)
shutil.copy(
file_relative_path(__file__, "./test_fixtures/great_expectations_titanic.yml"),
str(os.path.join(context_path, "great_expectations.yml")),
)
shutil.copy(
file_relative_path(__file__, "./test_sets/Titanic.csv"),
str(os.path.join(context_path, "../data/titanic/Titanic_1911.csv")),
)
shutil.copy(
file_relative_path(__file__, "./test_sets/Titanic.csv"),
str(os.path.join(context_path, "../data/titanic/Titanic_1912.csv")),
)
return ge.data_context.DataContext(context_path)
@pytest.fixture
def v10_project_directory(tmp_path_factory):
"""
GE 0.10.x project for testing upgrade helper
"""
project_path = str(tmp_path_factory.mktemp("v10_project"))
context_root_dir = os.path.join(project_path, "great_expectations")
shutil.copytree(
file_relative_path(
__file__, "./test_fixtures/upgrade_helper/great_expectations_v10_project/"
),
context_root_dir,
)
shutil.copy(
file_relative_path(
__file__, "./test_fixtures/upgrade_helper/great_expectations_v1_basic.yml"
),
os.path.join(context_root_dir, "great_expectations.yml"),
)
return context_root_dir
@pytest.fixture
def data_context_parameterized_expectation_suite(tmp_path_factory):
"""
This data_context is *manually* created to have the config we want, vs
created with DataContext.create()
"""
project_path = str(tmp_path_factory.mktemp("data_context"))
context_path = os.path.join(project_path, "great_expectations")
asset_config_path = os.path.join(context_path, "expectations")
fixture_dir = file_relative_path(__file__, "./test_fixtures")
os.makedirs(
os.path.join(asset_config_path, "my_dag_node"), exist_ok=True,
)
shutil.copy(
os.path.join(fixture_dir, "great_expectations_basic.yml"),
str(os.path.join(context_path, "great_expectations.yml")),
)
shutil.copy(
os.path.join(
fixture_dir,
"expectation_suites/parameterized_expectation_suite_fixture.json",
),
os.path.join(asset_config_path, "my_dag_node/default.json"),
)
os.makedirs(os.path.join(context_path, "plugins"), exist_ok=True)
shutil.copy(
os.path.join(fixture_dir, "custom_pandas_dataset.py"),
str(os.path.join(context_path, "plugins", "custom_pandas_dataset.py")),
)
shutil.copy(
os.path.join(fixture_dir, "custom_sqlalchemy_dataset.py"),
str(os.path.join(context_path, "plugins", "custom_sqlalchemy_dataset.py")),
)
shutil.copy(
os.path.join(fixture_dir, "custom_sparkdf_dataset.py"),
str(os.path.join(context_path, "plugins", "custom_sparkdf_dataset.py")),
)
return ge.data_context.DataContext(context_path)
@pytest.fixture
def data_context_with_bad_notebooks(tmp_path_factory):
"""
This data_context is *manually* created to have the config we want, vs
created with DataContext.create()
"""
project_path = str(tmp_path_factory.mktemp("data_context"))
context_path = os.path.join(project_path, "great_expectations")
asset_config_path = os.path.join(context_path, "expectations")
fixture_dir = file_relative_path(__file__, "./test_fixtures")
custom_notebook_assets_dir = "notebook_assets"
os.makedirs(
os.path.join(asset_config_path, "my_dag_node"), exist_ok=True,
)
shutil.copy(
os.path.join(fixture_dir, "great_expectations_basic_with_bad_notebooks.yml"),
str(os.path.join(context_path, "great_expectations.yml")),
)
shutil.copy(
os.path.join(
fixture_dir,
"expectation_suites/parameterized_expectation_suite_fixture.json",
),
os.path.join(asset_config_path, "my_dag_node/default.json"),
)
os.makedirs(os.path.join(context_path, "plugins"), exist_ok=True)
shutil.copytree(
os.path.join(fixture_dir, custom_notebook_assets_dir),
str(os.path.join(context_path, "plugins", custom_notebook_assets_dir)),
)
return ge.data_context.DataContext(context_path)
@pytest.fixture
def data_context_custom_notebooks(tmp_path_factory):
"""
This data_context is *manually* created to have the config we want, vs
created with DataContext.create()
"""
project_path = str(tmp_path_factory.mktemp("data_context"))
context_path = os.path.join(project_path, "great_expectations")
asset_config_path = os.path.join(context_path, "expectations")
fixture_dir = file_relative_path(__file__, "./test_fixtures")
os.makedirs(
os.path.join(asset_config_path, "my_dag_node"), exist_ok=True,
)
shutil.copy(
os.path.join(fixture_dir, "great_expectations_custom_notebooks.yml"),
str(os.path.join(context_path, "great_expectations.yml")),
)
shutil.copy(
os.path.join(
fixture_dir,
"expectation_suites/parameterized_expectation_suite_fixture.json",
),
os.path.join(asset_config_path, "my_dag_node/default.json"),
)
os.makedirs(os.path.join(context_path, "plugins"), exist_ok=True)
return ge.data_context.DataContext(context_path)
@pytest.fixture
def data_context_simple_expectation_suite(tmp_path_factory):
"""
This data_context is *manually* created to have the config we want, vs
created with DataContext.create()
"""
project_path = str(tmp_path_factory.mktemp("data_context"))
context_path = os.path.join(project_path, "great_expectations")
asset_config_path = os.path.join(context_path, "expectations")
fixture_dir = file_relative_path(__file__, "./test_fixtures")
os.makedirs(
os.path.join(asset_config_path, "my_dag_node"), exist_ok=True,
)
shutil.copy(
os.path.join(fixture_dir, "great_expectations_basic.yml"),
str(os.path.join(context_path, "great_expectations.yml")),
)
shutil.copy(
os.path.join(fixture_dir, "rendering_fixtures/expectations_suite_1.json",),
os.path.join(asset_config_path, "default.json"),
)
os.makedirs(os.path.join(context_path, "plugins"), exist_ok=True)
shutil.copy(
os.path.join(fixture_dir, "custom_pandas_dataset.py"),
str(os.path.join(context_path, "plugins", "custom_pandas_dataset.py")),
)
shutil.copy(
os.path.join(fixture_dir, "custom_sqlalchemy_dataset.py"),
str(os.path.join(context_path, "plugins", "custom_sqlalchemy_dataset.py")),
)
shutil.copy(
os.path.join(fixture_dir, "custom_sparkdf_dataset.py"),
str(os.path.join(context_path, "plugins", "custom_sparkdf_dataset.py")),
)
return ge.data_context.DataContext(context_path)
@pytest.fixture()
def filesystem_csv_data_context(empty_data_context, filesystem_csv_2):
empty_data_context.add_datasource(
"rad_datasource",
module_name="great_expectations.datasource",
class_name="PandasDatasource",
batch_kwargs_generators={
"subdir_reader": {
"class_name": "SubdirReaderBatchKwargsGenerator",
"base_directory": str(filesystem_csv_2),
}
},
)
return empty_data_context
@pytest.fixture
def filesystem_csv(tmp_path_factory):
base_dir = tmp_path_factory.mktemp("filesystem_csv")
base_dir = str(base_dir)
# Put a few files in the directory
with open(os.path.join(base_dir, "f1.csv"), "w") as outfile:
outfile.writelines(["a,b,c\n"])
with open(os.path.join(base_dir, "f2.csv"), "w") as outfile:
outfile.writelines(["a,b,c\n"])
os.makedirs(os.path.join(base_dir, "f3"), exist_ok=True)
with open(os.path.join(base_dir, "f3", "f3_20190101.csv"), "w") as outfile:
outfile.writelines(["a,b,c\n"])
with open(os.path.join(base_dir, "f3", "f3_20190102.csv"), "w") as outfile:
outfile.writelines(["a,b,c\n"])
return base_dir
@pytest.fixture
def filesystem_csv_2(tmp_path_factory):
base_dir = tmp_path_factory.mktemp("test_files")
base_dir = str(base_dir)
# Put a file in the directory
toy_dataset = PandasDataset({"x": [1, 2, 3]})
toy_dataset.to_csv(os.path.join(base_dir, "f1.csv"), index=None)
return base_dir
@pytest.fixture
def filesystem_csv_3(tmp_path_factory):
base_dir = tmp_path_factory.mktemp("test_files")
base_dir = str(base_dir)
# Put a file in the directory
toy_dataset = PandasDataset({"x": [1, 2, 3]})
toy_dataset.to_csv(os.path.join(base_dir, "f1.csv"), index=None)
toy_dataset_2 = PandasDataset({"y": [1, 2, 3]})
toy_dataset_2.to_csv(os.path.join(base_dir, "f2.csv"), index=None)
return base_dir
@pytest.fixture()
def filesystem_csv_4(tmp_path_factory):
base_dir = tmp_path_factory.mktemp("test_files")
base_dir = str(base_dir)
# Put a file in the directory
toy_dataset = PandasDataset({"x": [1, 2, 3], "y": [1, 2, 3],})
toy_dataset.to_csv(os.path.join(base_dir, "f1.csv"), index=None)
return base_dir
@pytest.fixture
def titanic_profiled_evrs_1():
with open(
file_relative_path(
__file__, "./render/fixtures/BasicDatasetProfiler_evrs.json"
),
"r",
) as infile:
return expectationSuiteValidationResultSchema.loads(infile.read())
@pytest.fixture
def titanic_profiled_name_column_evrs():
# This is a janky way to fetch expectations matching a specific name from an EVR suite.
# TODO: It will no longer be necessary once we implement ValidationResultSuite._group_evrs_by_column
from great_expectations.render.renderer.renderer import Renderer
with open(
file_relative_path(
__file__, "./render/fixtures/BasicDatasetProfiler_evrs.json"
),
"r",
) as infile:
titanic_profiled_evrs_1 = expectationSuiteValidationResultSchema.load(
json.load(infile)
)
evrs_by_column = Renderer()._group_evrs_by_column(titanic_profiled_evrs_1)
name_column_evrs = evrs_by_column["Name"]
return name_column_evrs
@pytest.fixture
def titanic_profiled_expectations_1():
with open(
file_relative_path(
__file__, "./render/fixtures/BasicDatasetProfiler_expectations.json"
),
"r",
) as infile:
return expectationSuiteSchema.load(json.load(infile))
@pytest.fixture
def titanic_profiled_name_column_expectations():
from great_expectations.render.renderer.renderer import Renderer
with open(
file_relative_path(
__file__, "./render/fixtures/BasicDatasetProfiler_expectations.json"
),
"r",
) as infile:
titanic_profiled_expectations = expectationSuiteSchema.load(json.load(infile))
columns, ordered_columns = Renderer()._group_and_order_expectations_by_column(
titanic_profiled_expectations
)
name_column_expectations = columns["Name"]
return name_column_expectations
@pytest.fixture
def titanic_validation_results():
with open(
file_relative_path(__file__, "./test_sets/expected_cli_results_default.json"),
"r",
) as infile:
return expectationSuiteValidationResultSchema.load(json.load(infile))
# various types of evr
@pytest.fixture
def evr_failed():
return ExpectationValidationResult(
success=False,
result={
"element_count": 1313,
"missing_count": 0,
"missing_percent": 0.0,
"unexpected_count": 3,
"unexpected_percent": 0.2284843869002285,
"unexpected_percent_nonmissing": 0.2284843869002285,
"partial_unexpected_list": [
"Daly, Mr Peter Denis ",
"Barber, Ms ",
"Geiger, Miss Emily ",
],
"partial_unexpected_index_list": [77, 289, 303],
"partial_unexpected_counts": [
{"value": "Barber, Ms ", "count": 1},
{"value": "Daly, Mr Peter Denis ", "count": 1},
{"value": "Geiger, Miss Emily ", "count": 1},
],
},
exception_info={
"raised_exception": False,
"exception_message": None,
"exception_traceback": None,
},
expectation_config={
"expectation_type": "expect_column_values_to_not_match_regex",
"kwargs": {
"column": "Name",
"regex": "^\\s+|\\s+$",
"result_format": "SUMMARY",
},
},
)
@pytest.fixture
def evr_failed_with_exception():
return ExpectationValidationResult(
success=False,
exception_info={
"raised_exception": True,
"exception_message": "Invalid partition object.",
"exception_traceback": 'Traceback (most recent call last):\n File "/great_expectations/great_expectations/data_asset/data_asset.py", line 216, in wrapper\n return_obj = func(self, **evaluation_args)\n File "/great_expectations/great_expectations/dataset/dataset.py", line 106, in inner_wrapper\n evaluation_result = func(self, column, *args, **kwargs)\n File "/great_expectations/great_expectations/dataset/dataset.py", line 3381, in expect_column_kl_divergence_to_be_less_than\n raise ValueError("Invalid partition object.")\nValueError: Invalid partition object.\n',
},
expectation_config=ExpectationConfiguration(
expectation_type="expect_column_kl_divergence_to_be_less_than",
kwargs={
"column": "live",
"partition_object": None,
"threshold": None,
"result_format": "SUMMARY",
},
meta={"BasicDatasetProfiler": {"confidence": "very low"}},
),
)
@pytest.fixture
def evr_success():
return ExpectationValidationResult(
success=True,
result={"observed_value": 1313},
exception_info={
"raised_exception": False,
"exception_message": None,
"exception_traceback": None,
},
expectation_config=ExpectationConfiguration(
expectation_type="expect_table_row_count_to_be_between",
kwargs={"min_value": 0, "max_value": None, "result_format": "SUMMARY"},
),
)
@pytest.fixture
def sqlite_view_engine(test_backends):
# Create a small in-memory engine with two views, one of which is temporary
if "sqlite" in test_backends:
import sqlalchemy as sa
sqlite_engine = sa.create_engine("sqlite://")
df = pd.DataFrame({"a": [1, 2, 3, 4, 5]})
df.to_sql("test_table", con=sqlite_engine)
sqlite_engine.execute(
"CREATE TEMP VIEW test_temp_view AS SELECT * FROM test_table where a < 4;"
)
sqlite_engine.execute(
"CREATE VIEW test_view AS SELECT * FROM test_table where a > 4;"
)
return sqlite_engine
else:
pytest.skip("SqlAlchemy tests disabled; not testing views")
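# A minimal sketch (hypothetical test) consuming the fixture above: the
# permanent view is registered in sqlite_master, while the TEMP view lives in
# sqlite_temp_master instead.
def test_sqlite_view_engine_registers_view(sqlite_view_engine):
    rows = sqlite_view_engine.execute(
        "SELECT name FROM sqlite_master WHERE type='view';"
    ).fetchall()
    assert ("test_view",) in rows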
@pytest.fixture
def expectation_suite_identifier():
return ExpectationSuiteIdentifier("my.expectation.suite.name")
@pytest.fixture
def basic_sqlalchemy_datasource(sqlitedb_engine):
return SqlAlchemyDatasource("basic_sqlalchemy_datasource", engine=sqlitedb_engine)
|
def fibonacci(n):
    """Return the first n Fibonacci numbers as a list."""
    # The original returned an int for n == 0 or n == 1 and a list otherwise;
    # returning a list in every case keeps the contract consistent.
    series = []
    a, b = 0, 1
    if n <= 0:
        return series
    series.append(a)
    if n == 1:
        return series
    series.append(b)
    for i in range(2, n):
        series.append(series[i - 1] + series[i - 2])
    return series
print(fibonacci(10))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import rospy
import math
import os
import cv2
from geometry_msgs.msg import Pose, Point, Quaternion
from std_msgs.msg import Float32MultiArray
from prometheus_msgs.msg import DetectionInfo, MultiDetectionInfo
rospy.init_node('oriented_object_segs_client', anonymous=True)
def load_class_desc(dataset='coco'):
"""
    Load the class information from the class_desc directory; each line of the txt file describes one class.
:param dataset: str 'coco'
:return: list ['cls1', 'cls2', ...]
"""
desc_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'class_desc')
desc_names = []
for f in os.listdir(desc_dir):
if f.endswith('.txt'):
desc_names.append(os.path.splitext(f)[0])
    # If the class description file exists, return all class names; otherwise raise an error
cls_names = []
cls_ws = []
cls_hs = []
if dataset in desc_names:
with open(os.path.join(desc_dir, dataset + '.txt')) as f:
for line in f.readlines():
if len(line.strip()) > 0:
name_wh = line.strip().split(',')
if len(name_wh) == 3:
cls_names.append(name_wh[0])
cls_ws.append(float(name_wh[1]))
cls_hs.append(float(name_wh[2]))
else:
cls_names.append(line.strip())
cls_ws.append(0)
cls_hs.append(0)
else:
raise NameError('{}.txt not exist in "class_desc"'.format(dataset))
    # The class description file must not be empty; otherwise raise an error
if len(cls_names) > 0:
return cls_names, cls_ws, cls_hs
else:
raise RuntimeError('{}.txt is EMPTY'.format(dataset))
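# A hypothetical example of the class_desc file format consumed above: each
# non-empty line of class_desc/<dataset>.txt is either a bare class name or
# "name,width,height" (object size, e.g. in metres):
#     car
#     plane,28.0,36.0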
pub_topic_name = rospy.get_param('~output_topic', '/prometheus/object_detection/oriented_object_segs')
object_names_txt = rospy.get_param('~object_names_txt', 'dota')
config = rospy.get_param('~camera_parameters', 'camera_param.yaml')
cls_names, cls_ws, cls_hs = load_class_desc(object_names_txt)
# print(pub_topic_name)
# print(object_names_txt)
# print(cls_names)
# print(config)
fs = cv2.FileStorage(config, cv2.FileStorage_READ)
image_width = int(fs.getNode('image_width').real())
image_height = int(fs.getNode('image_height').real())
camera_matrix = fs.getNode('camera_matrix').mat()
distortion_coefficients = fs.getNode('distortion_coefficients').mat()
fs.release()
print(image_width)
print(image_height)
print(camera_matrix)
print(distortion_coefficients)
pub = rospy.Publisher(pub_topic_name, MultiDetectionInfo, queue_size=1)
rate = rospy.Rate(100)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', 9095))
last_fid = 0
m_info = MultiDetectionInfo()
m_info.num_objs = 0
# print(camera_matrix[0][0])
# print(camera_matrix[1][1])
aos_x = math.atan(image_width / 2. / camera_matrix[0][0]) # angle of sight
aos_y = math.atan(image_height / 2. / camera_matrix[1][1])
# print(aos_x)
# print(aos_y)
while not rospy.is_shutdown():
data = s.recv(62+24) # 35
data = data.decode('utf-8')
print(data)
if len(data) > 0:
nums = data.split(',')
if len(nums) == 16:
frame_id = int(nums[0])
deted = int(nums[1])
order = int(nums[2])
cls = int(nums[3])
x1, y1, x2, y2 = float(nums[4]), float(nums[5]), float(nums[6]), float(nums[7])
x3, y3, x4, y4 = float(nums[8]), float(nums[9]), float(nums[10]), float(nums[11])
score = float(nums[12])
pixel_cx = int(nums[13])
pixel_cy = int(nums[14])
detect_track = int(nums[15]) # 0:detect, 1:track
m_info.detect_or_track = detect_track
# print(frame_id)
if deted >= 1:
d_info = DetectionInfo()
d_info.detected = True
d_info.frame = frame_id
d_info.object_name = cls_names[cls]
d_info.category = cls
d_info.sight_angle = [((x1+x2+x3+x4)/4.-0.5)*aos_x, ((y1+y2+y3+y4)/4.-0.5)*aos_y]
d_info.pixel_position = [pixel_cx, pixel_cy]
                if cls_hs[cls] > 0:
                    # NOTE: `h` was undefined in the original; as an assumption
                    # we estimate the normalized box height from the four
                    # corner points.
                    h = max(y1, y2, y3, y4) - min(y1, y2, y3, y4)
                    depth = (cls_hs[cls]*camera_matrix[1][1]) / (h*image_height)
                    d_info.position = [math.tan(d_info.sight_angle[0])*depth, math.tan(d_info.sight_angle[1])*depth, depth]
m_info.detection_infos.append(d_info)
m_info.num_objs += 1
for i in range(deted):
if i > 0:
data = s.recv(62+24) # 35
data = data.decode('utf-8')
print(data)
if len(data) > 0:
nums = data.split(',')
if len(nums) == 16:
frame_id = int(nums[0])
deted = int(nums[1])
order = int(nums[2])
cls = int(nums[3])
x1, y1, x2, y2 = float(nums[4]), float(nums[5]), float(nums[6]), float(nums[7])
x3, y3, x4, y4 = float(nums[8]), float(nums[9]), float(nums[10]), float(nums[11])
score = float(nums[12])
pixel_cx = int(nums[13])
pixel_cy = int(nums[14])
detect_track = int(nums[15]) # 0:detect, 1:track
assert order == i, "server error"
d_info = DetectionInfo()
d_info.detected = True
d_info.frame = frame_id
d_info.object_name = cls_names[cls]
d_info.category = cls
d_info.sight_angle = [((x1+x2+x3+x4)/4.-0.5)*aos_x, ((y1+y2+y3+y4)/4.-0.5)*aos_y]
d_info.pixel_position = [pixel_cx, pixel_cy]
                        if cls_hs[cls] > 0:
                            # NOTE: `h` was undefined in the original; as an
                            # assumption we estimate the normalized box height
                            # from the four corner points.
                            h = max(y1, y2, y3, y4) - min(y1, y2, y3, y4)
                            depth = (cls_hs[cls]*camera_matrix[1][1]) / (h*image_height)
                            d_info.position = [math.tan(d_info.sight_angle[0])*depth, math.tan(d_info.sight_angle[1])*depth, depth]
m_info.detection_infos.append(d_info)
m_info.num_objs += 1
if frame_id != last_fid:
pub.publish(m_info)
m_info = MultiDetectionInfo()
m_info.num_objs = 0
last_fid = frame_id
# print("{:.3f}, {:.3f}, {:.3f}, {:.3f}".format(ex, ey ,ess, speed))
# py_array = [ex * qx / 57.3, ey * qy / 57.3, ess, dt, lock_stat, prop]
# ros_array = Float32MultiArray(data=py_array)
# pose = Pose(Point(ex, ey ,ess), Quaternion(speed, dt, lock_stat, 0.))
# pub.publish(ros_array)
rate.sleep()
|
from django.apps import AppConfig
class MyserConfig(AppConfig):
name = 'myser'
|
#!/usr/bin/env python3
import numpy as np
import re
import random
# ---- Hamming code classes --- #
# This code assumes that words and codewords are encoded as row vectors.
# Thus, word w is encoded into codeword c with w.G and c is decoded with c.H.
# ---- Hamming encoder class --- #
class HammingEncoder(object):
"""Takes a source message and adds Hamming parity-check bits"""
def __init__(self, r):
"""Constructs a Hamming encoder"""
self.r = r
self.n = 2 ** self.r - 1
self.genmatrix = self.__make_genmatrix()
def __make_genmatrix(self):
"""Creates the generator matrix for the Hamming code"""
genmatrix = np.zeros((self.n - self.r, self.n), dtype=np.uint) # k x n
p_set = set([2 ** i - 1 for i in range(self.r)])
d_set = set(range(self.n)) - p_set
# fills in parity bit columns of the generator matrix
for p_item in p_set:
for d_index, d_item in enumerate(d_set):
if (p_item + 1) & (d_item + 1) != 0:
genmatrix[d_index][p_item] = 1
# fills in data bit columns of the generator matrix
for d_index, d_item in enumerate(d_set):
genmatrix[d_index][d_item] = 1
return genmatrix
def encode(self, word):
"""Constructs a codeword with parity bits given a word of an appropriate length.
Assumes that the input is a string of 0s and 1s"""
if len(word) != (self.n - self.r):
raise ValueError("Wrong word length")
return arr_to_str(np.dot(str_to_arr(word), self.genmatrix) % 2)
# ---- Hamming checker class --- #
class HammingChecker(object):
"""Reads a codeword and checks if the word bits and the parity bits match up"""
def __init__(self, r):
"""Constructs a Hamming parity-checker"""
self.r = r
self.n = 2 ** self.r - 1
self.checkmatrix = self.__make_checkmatrix()
def __make_checkmatrix(self):
"""Creates the parity-check matrix for the Hamming code"""
p_set = set([2 ** i - 1 for i in range(self.r)])
d_set = set(range(self.n)) - p_set
checkmatrix = np.zeros((self.n, self.r), dtype=np.uint) # n x r
# filling in parity bit rows of the parity check matrix
for d_item in d_set:
for index in range(self.r):
checkmatrix[d_item, index] = int(((d_item + 1) >> index) & 1)
# filling in data bit rows of the parity check matrix
for p_index, p_item in enumerate(p_set):
checkmatrix[p_item][p_index] = 1
return checkmatrix
def get_matching_row(self, row):
"""Searches for a row in the parity-check matrix and returns its index.
Returns -1 if not found."""
try:
return np.where(np.all(self.checkmatrix == row, axis=1))[0][0]
except IndexError:
return -1
def check(self, codeword):
"""Checks if a codeword's word bits and parity bits match up."""
if len(codeword) != (self.n):
raise ValueError("Codeword is the wrong length.")
return self.get_matching_row(np.dot(str_to_arr(codeword), self.checkmatrix) % 2)
def correct(self, codeword):
"""Tries to correct the corrupted bit."""
if len(codeword) != (self.n):
raise ValueError("Codeword is the wrong length.")
cw_arr = str_to_arr(codeword)
res = self.get_matching_row(np.dot(cw_arr, self.checkmatrix) % 2)
if res != -1:
cw_arr[res] = (cw_arr[res] + 1) % 2
return arr_to_str(cw_arr)
else:
return codeword
# ---- Conversion utilities --- #
def str_to_arr(s):
"""Converts binary string to numpy array"""
if not re.fullmatch(r'(0|1)*', s):
raise ValueError('Input must be in binary.')
return np.array([int(d) for d in s], dtype=np.uint)
def arr_to_str(arr):
"""Converts numpy array to string"""
return re.sub(r'\[|\]|\s+', '', np.array_str(arr))
# ---- Helper functions --- #
def random_word(length):
    """Returns a random binary word of the given length"""
    return ''.join([random.choice(('0', '1')) for _ in range(length)])
def add_noise(s, p):
"""Adds noise to transmissions"""
arr = str_to_arr(s)
count = 0
for i in range(len(arr)):
r = random.random()
if (r < p):
arr[i] = (arr[i] + 1) % 2
count += 1
return arr_to_str(arr), count
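# A minimal round-trip sketch (illustration, not part of the original module):
# with r = 3 this is the classic (7,4) Hamming code, which corrects any
# single-bit error introduced by add_noise.
if __name__ == '__main__':
    encoder = HammingEncoder(3)
    checker = HammingChecker(3)
    word = random_word(4)                  # 4 data bits for the (7,4) code
    codeword = encoder.encode(word)        # 7-bit codeword with parity bits
    received, flips = add_noise(codeword, 0.05)
    corrected = checker.correct(received)
    if flips <= 1:
        assert corrected == codeword       # single-bit errors are always fixed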
|
# -*- coding: utf-8 -*-
"""
Created on Thu May 28 17:08:30 2020
@author: Christopher Cheng
"""
import time
print("Loading...")
for i in range(11):
    print("[", i * "*", (10 - i) * " ", "]", i * 10, "% Complete")
    time.sleep(0.5)
|
import os
from functools import reduce
from core.TemplateEngine import render
mydir = os.path.dirname(__file__)
def tab(n):
if n <= 0:
return ""
else:
# return "\t" * n
return " " * n
def indent(lines):
return [tab(1) + line for line in lines]
def convert_to_cpptype_string(vtype):
"""
:param vtype:
    :return: the C++ string representation corresponding to the given type
"""
if vtype == float:
return "long double"
elif vtype == int:
return "long long"
elif vtype == str:
return "string"
else:
raise NotImplementedError
def input_code(vtype, vname_for_input):
if vtype == float:
return 'scanf("%Lf",&{name})'.format(name=vname_for_input)
elif vtype == int:
return 'scanf("%lld",&{name})'.format(name=vname_for_input)
elif vtype == str:
return 'cin >> {name}'.format(name=vname_for_input)
else:
raise NotImplementedError
def generate_declaration(v):
"""
    :param v: variable information
    :return: builds the declaration part for variable v, e.g. array[1..n] → vector<int> array = vector<int>(n-1+1);
"""
dim = len(v.indexes)
typename = convert_to_cpptype_string(v.type)
if dim == 0:
type_template_before = "{type}".format(type=typename)
type_template_after = ""
elif dim == 1:
type_template_before = "vector<{type}>".format(type=typename)
type_template_after = "({size})".format(
size=v.indexes[0].zero_indexed().max_index_plus_1)
elif dim == 2:
type_template_before = "vector< vector<{type}> >".format(type=typename)
type_template_after = "({row_size},vector<{type}>({col_size}))".format(
type=typename,
row_size=v.indexes[0].zero_indexed().max_index_plus_1,
col_size=v.indexes[1].zero_indexed().max_index_plus_1
)
else:
raise NotImplementedError
line = "{declaration} {name}{constructor};".format(
name=v.name,
declaration=type_template_before,
constructor=type_template_after
)
return line
def generate_arguments(var_information):
"""
    :param var_information: information about all variables
    :return: string representations of the formal and actual parameters (both in the same order);
    - formal_params: formal parameters, e.g. int a, string b, vector<int> ccc
    - actual_params: actual parameters, e.g. a, b, ccc
"""
formal_lst = []
actual_lst = []
for name, v in var_information.items():
dim = len(v.indexes)
typename = convert_to_cpptype_string(v.type)
if dim == 0:
type_template = "{type}".format(type=typename)
elif dim == 1:
type_template = "vector<{type}>".format(type=typename)
elif dim == 2:
type_template = "vector< vector<{type}> >".format(type=typename)
else:
raise NotImplementedError
formal_lst.append("{type} {name}".format(
type=type_template, name=name))
actual_lst.append(name)
formal_params = ", ".join(formal_lst)
actual_params = ", ".join(actual_lst)
return formal_params, actual_params
def generate_input_part(node, var_information, inputted, undeclared, depth, indexes):
"""
    :param node: tree of analysis results produced by FormatPredictor (const)
    :param var_information: information about the variables (const)
    :param inputted: set of variable names whose input is complete (pass set() on the initial call)
    :param undeclared: set of variable names not yet declared (pass set(all appearing variables) on the initial call)
    :param depth: nesting depth (pass 0 on the initial call)
    :param indexes: while recursing inside a double loop this looks like indexes=["i","j"] (pass [] on the initial call)
    :return: the lines of input code
"""
lines = []
def declare_if_ready():
"""
        Subroutine. For an input such as
        K N a_1 ... a_N
        it would be unfortunate to declare
        vector<int> a(N);
        before N has been read, so each variable is declared as soon as every variable it depends on has been input.
"""
nonlocal lines, inputted, undeclared, var_information
will_declare = []
for vname in undeclared:
related_vars = reduce(lambda a, b: a + b,
[index.min_index.get_all_varnames() + index.max_index.get_all_varnames()
for index in var_information[vname].indexes], []
)
if all([(var in inputted) for var in related_vars]):
will_declare.append(vname)
for vname in will_declare:
lines.append(generate_declaration(var_information[vname]))
undeclared.remove(vname)
if depth == 0:
        # At the start of input (depth == 0), first declare all variables that have no constraints
declare_if_ready()
if node.pointers is not None:
'''
        Processes a block of some kind (a loop when it carries an index),
        i.e. something like [a,b,c] or [ai,bi,ci] (min<=i<=max)
'''
if node.index is None:
for child in node.pointers:
lines += generate_input_part(child, var_information,
inputted, undeclared, depth + 1, indexes)
else:
loopv = "i" if indexes == [] else "j"
            # start of the loop
lines.append("for(int {x} = {start} ; {x} < {end} ; {x}++){{".format(
x=loopv,
start=node.index.zero_indexed().min_index,
end=node.index.zero_indexed().max_index_plus_1)
)
            # body of the loop
for child in node.pointers:
lines += indent(generate_input_part(child, var_information,
inputted, undeclared, depth + 1, indexes + [loopv]))
        # after the loop
if node.index is not None:
lines.append("}")
else:
        ''' Input handling once a variable has been decomposed into its smallest unit '''
vname_for_input = node.varname + \
("" if indexes == [] else "[" + "][".join(indexes) + "]")
vtype = var_information[node.varname].type
line = "{input_code};".format(
input_code=input_code(vtype, vname_for_input))
lines.append(line)
inputted.add(node.varname)
declare_if_ready()
return lines
def code_generator(predict_result=None):
with open("{dir}/template_success.cpp".format(dir=mydir), "r") as f:
template_success = f.read()
with open("{dir}/template_failure.cpp".format(dir=mydir), "r") as f:
template_failure = f.read()
if predict_result is not None:
formal_arguments, actual_arguments = generate_arguments(
predict_result.var_information)
input_part_lines = generate_input_part(
node=predict_result.analyzed_root,
var_information=predict_result.var_information,
inputted=set(),
undeclared=set(predict_result.var_information.keys()),
depth=0,
indexes=[]
)
code = render(template_success,
formal_arguments=formal_arguments,
actual_arguments=actual_arguments,
input_part=input_part_lines)
else:
code = template_failure
return code
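# A minimal sketch (with hypothetical stand-in objects, not the real
# FormatPredictor result types) of what generate_declaration emits for a
# one-dimensional int array indexed 1..N:
if __name__ == '__main__':
    class _FakeIndex:
        def __init__(self, max_index_plus_1):
            self.max_index_plus_1 = max_index_plus_1

        def zero_indexed(self):
            return self

    class _FakeVar:
        def __init__(self, name, vtype, indexes):
            self.name, self.type, self.indexes = name, vtype, indexes

    print(generate_declaration(_FakeVar("a", int, [_FakeIndex("N")])))
    # -> vector<long long> a(N);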
|
import pandas as pd
import numpy as np
def build_and_clean_df(original_df, sub_features):
df = original_df[sub_features].copy()
df = df.replace({np.nan: None})
df = df.astype(
{
"commune": "category",
"enqu": "category",
"wilaya": "category",
"ident": "category",
"numquest": "category",
}
)
df["TxDep"] = df["TxDep"].fillna(0)
return df
def label_encoders_generator(df, list_of_columns_to_encode):
label_encoders = []
for col in list_of_columns_to_encode:
le = pd.DataFrame(df[col].unique()).reset_index()
label_encoders.append(le)
return label_encoders
def encode_df(df, list_of_cols, list_of_label_encoded_features):
for k in range(len(list_of_label_encoded_features)):
col_name = list_of_cols[k]
label_encoded_features = list_of_label_encoded_features[k]
df = df.merge(
label_encoded_features,
how="left",
left_on=col_name,
right_on=0,
suffixes=("", "_right"),
).drop(columns=[0, col_name])
df = df.rename(columns={"index": col_name})
return df
def decode_df(df, list_of_cols, list_of_label_encoded_features):
for k in range(len(list_of_label_encoded_features)):
col_name = list_of_cols[k]
label_encoded_features = list_of_label_encoded_features[k]
df[col_name] = df[col_name].astype("int")
df = df.merge(
label_encoded_features,
how="left",
left_on=col_name,
right_on="index",
suffixes=("", "_right"),
).drop(columns=["index", col_name])
df = df.rename(columns={0: col_name})
return df
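# A minimal round-trip sketch (hypothetical toy data, not the original survey
# dataset) for the label-encoding helpers above:
if __name__ == "__main__":
    raw = pd.DataFrame({"commune": ["A", "B", "A"], "TxDep": [0.1, None, 0.3]})
    cols = ["commune"]
    encoders = label_encoders_generator(raw, cols)
    encoded = encode_df(raw.copy(), cols, encoders)  # labels -> integer codes
    decoded = decode_df(encoded, cols, encoders)     # codes -> original labels
    assert list(decoded["commune"]) == list(raw["commune"])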
|
"""
Base settings to build other settings files upon.
"""
import environ
#import django
ROOT_DIR = environ.Path(__file__) - 3 # (nobody_will_see_this/config/settings/base.py - 3 = nobody_will_see_this/)
APPS_DIR = ROOT_DIR.path('nobody_will_see_this')
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
# OS environment variables take precedence over variables from .env
env.read_env(str(ROOT_DIR.path('.env')))
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# Local time zone. Choices are
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# though not all of them may be available with every OS.
# In Windows, this must be set to your system time zone.
TIME_ZONE = 'America/Phoenix'
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
#'default': env.db('DATABASE_URL', default='postgres://localhost/watervize'),
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'nwst',
'USER': 'nwst',
'PASSWORD': env('DATABASE_PASS', default='pass'),
'HOST': 'localhost', # Or an IP Address that your DB is hosted on
'PORT': '5432',
'ATOMIC_REQUESTS': True
}
}
#DATABASES['default']['ATOMIC_REQUESTS'] = True
#DATABASES['default']['ENGINE'] = 'django.contrib.gis.db.backends.postgis'
CONN_MAX_AGE = 10
# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = 'config.urls'
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# 'django.contrib.humanize', # Handy template tags
'django.contrib.admin',
'django.forms',
]
THIRD_PARTY_APPS = [
'crispy_forms',
'allauth',
'allauth.account',
'allauth.socialaccount',
'corsheaders',
'rest_framework',
'rest_framework_datatables',
#'bootstrap4',
#'django_extensions',
#'extra_views',
]
LOCAL_APPS = [
'nobody_will_see_this.users.apps.UsersAppConfig',
# Your stuff: custom apps go here
'nobody_will_see_this.streams',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {
'sites': 'nobody_will_see_this.contrib.sites.migrations'
}
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
AUTH_USER_MODEL = 'users.User'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url
LOGIN_REDIRECT_URL = 'users:redirect'
# https://docs.djangoproject.com/en/dev/ref/settings/#login-url
LOGIN_URL = 'account_login'
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = [
# https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [
str(APPS_DIR.path('static')),
]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
# MEDIA
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL.
ADMIN_URL = 'jds783892fh3yu2ghif84huycr3g6iur32ygtfr76g8c2ewh3td78ci34ouryv/'
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [
("""Omen Apps""", 'support@omenapps.com'),
]
# https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# Celery
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['nobody_will_see_this.taskapp.celery.CeleryAppConfig']
if USE_TZ:
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-timezone
CELERY_TIMEZONE = TIME_ZONE
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url
CELERY_BROKER_URL = env('CELERY_BROKER_URL')
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content
CELERY_ACCEPT_CONTENT = ['json']
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer
CELERY_TASK_SERIALIZER = 'json'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
CELERY_RESULT_SERIALIZER = 'json'
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit
# TODO: set to whatever value is adequate in your circumstances
CELERYD_TASK_TIME_LIMIT = 5 * 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
# TODO: set to whatever value is adequate in your circumstances
CELERYD_TASK_SOFT_TIME_LIMIT = 60
# django-allauth
# ------------------------------------------------------------------------------
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_AUTHENTICATION_METHOD = 'username'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_REQUIRED = True
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ADAPTER = 'nobody_will_see_this.users.adapters.AccountAdapter'
# https://django-allauth.readthedocs.io/en/latest/configuration.html
SOCIALACCOUNT_ADAPTER = 'nobody_will_see_this.users.adapters.SocialAccountAdapter'
# CACHES
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': env('REDIS_URL'),
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
# Mimicing memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
'IGNORE_EXCEPTIONS': True,
}
}
}
# Your stuff...
# ------------------------------------------------------------------------------
# PRAW
# ------------------------------------------------------------------------------
# https://
PRAW_USERNAME = env('DJANGO_PRAW_USERNAME', default='')
PRAW_PASSWORD = env('DJANGO_PRAW_PASSWORD', default='')
PRAW_CLIENT_ID = env('DJANGO_PRAW_CLIENT_ID', default='')
PRAW_CLIENT_SECRET = env('DJANGO_PRAW_CLIENT_SECRET', default='')
PRAW_USER_AGENT = env('DJANGO_PRAW_USER_AGENT', default='')
|
"""
HTML Widget classes
"""
import copy
import datetime
import time
from itertools import chain
from urlparse import urljoin
from util import flatatt
from django.conf import settings
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.html import escape, conditional_escape
from django.utils.translation import ugettext, ugettext_lazy
from django.utils.encoding import StrAndUnicode, force_unicode
from django.utils.safestring import mark_safe
from django.utils import datetime_safe, formats
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'PasswordInput',
'HiddenInput', 'MultipleHiddenInput', 'ClearableFileInput',
'FileInput', 'DateInput', 'DateTimeInput', 'TimeInput', 'Textarea', 'CheckboxInput',
'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget',
'SplitDateTimeWidget',
)
MEDIA_TYPES = ('css','js')
class Media(StrAndUnicode):
def __init__(self, media=None, **kwargs):
if media:
media_attrs = media.__dict__
else:
media_attrs = kwargs
self._css = {}
self._js = []
for name in MEDIA_TYPES:
getattr(self, 'add_' + name)(media_attrs.get(name, None))
# Any leftover attributes must be invalid.
# if media_attrs != {}:
# raise TypeError("'class Media' has invalid attribute(s): %s" % ','.join(media_attrs.keys()))
def __unicode__(self):
return self.render()
def render(self):
return mark_safe(u'\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
def render_js(self):
return [u'<script type="text/javascript" src="%s"></script>' % self.absolute_path(path) for path in self._js]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = self._css.keys()
media.sort()
return chain(*[
[u'<link href="%s" type="text/css" media="%s" rel="stylesheet" />' % (self.absolute_path(path), medium)
for path in self._css[medium]]
for medium in media])
def absolute_path(self, path, prefix=None):
if path.startswith(u'http://') or path.startswith(u'https://') or path.startswith(u'/'):
return path
if prefix is None:
if settings.STATIC_URL is None:
# backwards compatibility
prefix = settings.MEDIA_URL
else:
prefix = settings.STATIC_URL
return urljoin(prefix, path)
def __getitem__(self, name):
"Returns a Media object that only contains media of the given type"
if name in MEDIA_TYPES:
return Media(**{str(name): getattr(self, '_' + name)})
raise KeyError('Unknown media type "%s"' % name)
def add_js(self, data):
if data:
for path in data:
if path not in self._js:
self._js.append(path)
def add_css(self, data):
if data:
for medium, paths in data.items():
for path in paths:
if not self._css.get(medium) or path not in self._css[medium]:
self._css.setdefault(medium, []).append(path)
def __add__(self, other):
combined = Media()
for name in MEDIA_TYPES:
getattr(combined, 'add_' + name)(getattr(self, '_' + name, None))
getattr(combined, 'add_' + name)(getattr(other, '_' + name, None))
return combined
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
if hasattr(super(cls, self), 'media'):
base = super(cls, self).media
else:
base = Media()
# Get the media definition for this class
definition = getattr(cls, 'Media', None)
if definition:
extend = getattr(definition, 'extend', True)
if extend:
if extend == True:
m = base
else:
m = Media()
for medium in extend:
m = m + base[medium]
return m + Media(definition)
else:
return Media(definition)
else:
return base
return property(_media)
class MediaDefiningClass(type):
"Metaclass for classes that can have media definitions"
def __new__(cls, name, bases, attrs):
new_class = super(MediaDefiningClass, cls).__new__(cls, name, bases,
attrs)
if 'media' not in attrs:
new_class.media = media_property(new_class)
return new_class
class Widget(object):
__metaclass__ = MediaDefiningClass
is_hidden = False # Determines whether this corresponds to an <input type="hidden">.
    needs_multipart_form = False # Determines whether this widget needs a multipart-encoded form
is_localized = False
is_required = False
def __init__(self, attrs=None):
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
def render(self, name, value, attrs=None):
"""
Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively.
"""
raise NotImplementedError
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
attrs = dict(self.attrs, **kwargs)
if extra_attrs:
attrs.update(extra_attrs)
return attrs
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, returns the value
of this widget. Returns None if it's not provided.
"""
return data.get(name, None)
def _has_changed(self, initial, data):
"""
Return True if data differs from initial.
"""
        # For the purposes of seeing whether something has changed, None is
        # the same as an empty string: if the data or initial value we get
        # is None, replace it with u''.
if data is None:
data_value = u''
else:
data_value = data
if initial is None:
initial_value = u''
else:
initial_value = initial
if force_unicode(initial_value) != force_unicode(data_value):
return True
return False
def id_for_label(self, id_):
"""
Returns the HTML ID attribute of this Widget for use by a <label>,
given the ID of the field. Returns None if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
id_for_label = classmethod(id_for_label)
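# A minimal sketch (hypothetical subclass, not part of the original module;
# rendering the resulting tags additionally assumes Django settings with
# STATIC_URL configured) of the media machinery defined above:
class _SketchCalendarWidget(Widget):
    class Media:
        css = {'all': ('calendar.css',)}
        js = ('calendar.js',)
# Because Widget uses MediaDefiningClass, _SketchCalendarWidget().media is a
# Media instance whose rendering yields the <link>/<script> tags; media
# objects from several widgets can be combined with ``+``.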
class Input(Widget):
"""
Base class for all <input> widgets (except type='checkbox' and
type='radio', which are special).
"""
input_type = None # Subclasses must define this.
def _format_value(self, value):
if self.is_localized:
return formats.localize_input(value)
return value
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(self._format_value(value))
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class TextInput(Input):
input_type = 'text'
class PasswordInput(Input):
input_type = 'password'
def __init__(self, attrs=None, render_value=False):
super(PasswordInput, self).__init__(attrs)
self.render_value = render_value
def render(self, name, value, attrs=None):
if not self.render_value: value=None
return super(PasswordInput, self).render(name, value, attrs)
class HiddenInput(Input):
input_type = 'hidden'
is_hidden = True
class MultipleHiddenInput(HiddenInput):
"""
A widget that handles <input type="hidden"> for fields that have a list
of values.
"""
def __init__(self, attrs=None, choices=()):
super(MultipleHiddenInput, self).__init__(attrs)
# choices can be any iterable
self.choices = choices
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
id_ = final_attrs.get('id', None)
inputs = []
for i, v in enumerate(value):
input_attrs = dict(value=force_unicode(v), **final_attrs)
if id_:
# An ID attribute was given. Add a numeric index as a suffix
# so that the inputs don't all have the same ID attribute.
input_attrs['id'] = '%s_%s' % (id_, i)
inputs.append(u'<input%s />' % flatatt(input_attrs))
return mark_safe(u'\n'.join(inputs))
def value_from_datadict(self, data, files, name):
if isinstance(data, (MultiValueDict, MergeDict)):
return data.getlist(name)
return data.get(name, None)
class FileInput(Input):
input_type = 'file'
needs_multipart_form = True
def render(self, name, value, attrs=None):
return super(FileInput, self).render(name, None, attrs=attrs)
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name, None)
def _has_changed(self, initial, data):
if data is None:
return False
return True
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
initial_text = ugettext_lazy('Currently')
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
template_with_initial = u'%(initial_text)s: %(initial)s %(clear_template)s<br />%(input_text)s: %(input)s'
template_with_clear = u'%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
def clear_checkbox_name(self, name):
"""
Given the name of the file input, return the name of the clear checkbox
input.
"""
return name + '-clear'
def clear_checkbox_id(self, name):
"""
Given the name of the clear checkbox input, return the HTML id for it.
"""
return name + '_id'
def render(self, name, value, attrs=None):
substitutions = {
'initial_text': self.initial_text,
'input_text': self.input_text,
'clear_template': '',
'clear_checkbox_label': self.clear_checkbox_label,
}
template = u'%(input)s'
substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs)
if value and hasattr(value, "url"):
template = self.template_with_initial
substitutions['initial'] = (u'<a href="%s">%s</a>'
% (escape(value.url),
escape(force_unicode(value))))
if not self.is_required:
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
substitutions['clear_checkbox_name'] = conditional_escape(checkbox_name)
substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id)
substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
substitutions['clear_template'] = self.template_with_clear % substitutions
return mark_safe(template % substitutions)
def value_from_datadict(self, data, files, name):
upload = super(ClearableFileInput, self).value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
# False signals to clear any existing value, as opposed to just None
return False
return upload
class Textarea(Widget):
def __init__(self, attrs=None):
# The 'rows' and 'cols' attributes are required for HTML correctness.
default_attrs = {'cols': '40', 'rows': '10'}
if attrs:
default_attrs.update(attrs)
super(Textarea, self).__init__(default_attrs)
def render(self, name, value, attrs=None):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(u'<textarea%s>%s</textarea>' % (flatatt(final_attrs),
conditional_escape(force_unicode(value))))
class DateInput(Input):
input_type = 'text'
format = '%Y-%m-%d' # '2006-10-25'
def __init__(self, attrs=None, format=None):
super(DateInput, self).__init__(attrs)
if format:
self.format = format
self.manual_format = True
else:
self.format = formats.get_format('DATE_INPUT_FORMATS')[0]
self.manual_format = False
def _format_value(self, value):
if self.is_localized and not self.manual_format:
return formats.localize_input(value)
elif hasattr(value, 'strftime'):
value = datetime_safe.new_date(value)
return value.strftime(self.format)
return value
def _has_changed(self, initial, data):
# If our field has show_hidden_initial=True, initial will be a string
# formatted by HiddenInput using formats.localize_input, which is not
# necessarily the format used for this widget. Attempt to convert it.
try:
input_format = formats.get_format('DATE_INPUT_FORMATS')[0]
initial = datetime.date(*time.strptime(initial, input_format)[:3])
except (TypeError, ValueError):
pass
return super(DateInput, self)._has_changed(self._format_value(initial), data)
class DateTimeInput(Input):
input_type = 'text'
format = '%Y-%m-%d %H:%M:%S' # '2006-10-25 14:30:59'
def __init__(self, attrs=None, format=None):
super(DateTimeInput, self).__init__(attrs)
if format:
self.format = format
self.manual_format = True
else:
self.format = formats.get_format('DATETIME_INPUT_FORMATS')[0]
self.manual_format = False
def _format_value(self, value):
if self.is_localized and not self.manual_format:
return formats.localize_input(value)
elif hasattr(value, 'strftime'):
value = datetime_safe.new_datetime(value)
return value.strftime(self.format)
return value
def _has_changed(self, initial, data):
# If our field has show_hidden_initial=True, initial will be a string
# formatted by HiddenInput using formats.localize_input, which is not
# necessarily the format used for this widget. Attempt to convert it.
try:
input_format = formats.get_format('DATETIME_INPUT_FORMATS')[0]
initial = datetime.datetime(*time.strptime(initial, input_format)[:6])
except (TypeError, ValueError):
pass
return super(DateTimeInput, self)._has_changed(self._format_value(initial), data)
class TimeInput(Input):
input_type = 'text'
format = '%H:%M:%S' # '14:30:59'
def __init__(self, attrs=None, format=None):
super(TimeInput, self).__init__(attrs)
if format:
self.format = format
self.manual_format = True
else:
self.format = formats.get_format('TIME_INPUT_FORMATS')[0]
self.manual_format = False
def _format_value(self, value):
if self.is_localized and not self.manual_format:
return formats.localize_input(value)
elif hasattr(value, 'strftime'):
return value.strftime(self.format)
return value
def _has_changed(self, initial, data):
# If our field has show_hidden_initial=True, initial will be a string
# formatted by HiddenInput using formats.localize_input, which is not
# necessarily the format used for this widget. Attempt to convert it.
try:
input_format = formats.get_format('TIME_INPUT_FORMATS')[0]
initial = datetime.time(*time.strptime(initial, input_format)[3:6])
except (TypeError, ValueError):
pass
return super(TimeInput, self)._has_changed(self._format_value(initial), data)
class CheckboxInput(Widget):
def __init__(self, attrs=None, check_test=bool):
super(CheckboxInput, self).__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = check_test
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
try:
result = self.check_test(value)
        except Exception: # Silently catch exceptions raised by check_test
result = False
if result:
final_attrs['checked'] = 'checked'
if value not in ('', True, False, None):
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<input%s />' % flatatt(final_attrs))
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
value = data.get(name)
# Translate true and false strings to boolean values.
values = {'true': True, 'false': False}
if isinstance(value, basestring):
value = values.get(value.lower(), value)
return value
def _has_changed(self, initial, data):
# Sometimes data or initial could be None or u'' which should be the
# same thing as False.
return bool(initial) != bool(data)
class Select(Widget):
def __init__(self, attrs=None, choices=()):
super(Select, self).__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def render(self, name, value, attrs=None, choices=()):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<select%s>' % flatatt(final_attrs)]
options = self.render_options(choices, [value])
if options:
output.append(options)
output.append(u'</select>')
return mark_safe(u'\n'.join(output))
def render_option(self, selected_choices, option_value, option_label):
option_value = force_unicode(option_value)
selected_html = (option_value in selected_choices) and u' selected="selected"' or ''
return u'<option value="%s"%s>%s</option>' % (
escape(option_value), selected_html,
conditional_escape(force_unicode(option_label)))
def render_options(self, choices, selected_choices):
# Normalize to strings.
selected_choices = set([force_unicode(v) for v in selected_choices])
output = []
for option_value, option_label in chain(self.choices, choices):
if isinstance(option_label, (list, tuple)):
output.append(u'<optgroup label="%s">' % escape(force_unicode(option_value)))
for option in option_label:
output.append(self.render_option(selected_choices, *option))
output.append(u'</optgroup>')
else:
output.append(self.render_option(selected_choices, option_value, option_label))
return u'\n'.join(output)
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = ((u'1', ugettext('Unknown')), (u'2', ugettext('Yes')), (u'3', ugettext('No')))
super(NullBooleanSelect, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
try:
value = {True: u'2', False: u'3', u'2': u'2', u'3': u'3'}[value]
except KeyError:
value = u'1'
return super(NullBooleanSelect, self).render(name, value, attrs, choices)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
return {u'2': True,
True: True,
'True': True,
u'3': False,
'False': False,
False: False}.get(value, None)
def _has_changed(self, initial, data):
# For a NullBooleanSelect, None (unknown) and False (No)
# are not the same
if initial is not None:
initial = bool(initial)
if data is not None:
data = bool(data)
return initial != data
class SelectMultiple(Select):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<select multiple="multiple"%s>' % flatatt(final_attrs)]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe(u'\n'.join(output))
def value_from_datadict(self, data, files, name):
if isinstance(data, (MultiValueDict, MergeDict)):
return data.getlist(name)
return data.get(name, None)
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class RadioInput(StrAndUnicode):
"""
An object used by RadioFieldRenderer that represents a single
<input type='radio'>.
"""
def __init__(self, name, value, attrs, choice, index):
self.name, self.value = name, value
self.attrs = attrs
self.choice_value = force_unicode(choice[0])
self.choice_label = force_unicode(choice[1])
self.index = index
def __unicode__(self):
if 'id' in self.attrs:
label_for = ' for="%s_%s"' % (self.attrs['id'], self.index)
else:
label_for = ''
choice_label = conditional_escape(force_unicode(self.choice_label))
return mark_safe(u'<label%s>%s %s</label>' % (label_for, self.tag(), choice_label))
def is_checked(self):
return self.value == self.choice_value
def tag(self):
if 'id' in self.attrs:
self.attrs['id'] = '%s_%s' % (self.attrs['id'], self.index)
final_attrs = dict(self.attrs, type='radio', name=self.name, value=self.choice_value)
if self.is_checked():
final_attrs['checked'] = 'checked'
return mark_safe(u'<input%s />' % flatatt(final_attrs))
class RadioFieldRenderer(StrAndUnicode):
"""
An object used by RadioSelect to enable customization of radio widgets.
"""
def __init__(self, name, value, attrs, choices):
self.name, self.value, self.attrs = name, value, attrs
self.choices = choices
def __iter__(self):
for i, choice in enumerate(self.choices):
yield RadioInput(self.name, self.value, self.attrs.copy(), choice, i)
def __getitem__(self, idx):
        choice = self.choices[idx] # Let the IndexError propagate
return RadioInput(self.name, self.value, self.attrs.copy(), choice, idx)
def __unicode__(self):
return self.render()
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return mark_safe(u'<ul>\n%s\n</ul>' % u'\n'.join([u'<li>%s</li>'
% force_unicode(w) for w in self]))
class RadioSelect(Select):
renderer = RadioFieldRenderer
def __init__(self, *args, **kwargs):
# Override the default renderer if we were passed one.
renderer = kwargs.pop('renderer', None)
if renderer:
self.renderer = renderer
super(RadioSelect, self).__init__(*args, **kwargs)
def get_renderer(self, name, value, attrs=None, choices=()):
"""Returns an instance of the renderer."""
if value is None: value = ''
str_value = force_unicode(value) # Normalize to string.
final_attrs = self.build_attrs(attrs)
choices = list(chain(self.choices, choices))
return self.renderer(name, str_value, final_attrs, choices)
def render(self, name, value, attrs=None, choices=()):
return self.get_renderer(name, value, attrs, choices).render()
def id_for_label(self, id_):
# RadioSelect is represented by multiple <input type="radio"> fields,
# each of which has a distinct ID. The IDs are made distinct by a "_X"
# suffix, where X is the zero-based index of the radio field. Thus,
# the label for a RadioSelect should reference the first one ('_0').
if id_:
id_ += '_0'
return id_
id_for_label = classmethod(id_for_label)
class CheckboxSelectMultiple(SelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<ul>']
# Normalize to strings
str_values = set([force_unicode(v) for v in value])
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = u' for="%s"' % final_attrs['id']
else:
label_for = ''
cb = CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_unicode(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_unicode(option_label))
output.append(u'<li><label%s>%s %s</label></li>' % (label_for, rendered_cb, option_label))
output.append(u'</ul>')
return mark_safe(u'\n'.join(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
id_for_label = classmethod(id_for_label)
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
Its render() method is different than other widgets', because it has to
figure out how to split a single value for display in multiple widgets.
The ``value`` argument can be one of two things:
* A list.
* A normal value (e.g., a string) that has been "compressed" from
a list of values.
In the second case -- i.e., if the value is NOT a list -- render() will
first "decompress" the value into a list before rendering it. It does so by
calling the decompress() method, which MultiWidget subclasses must
implement. This method takes a single "compressed" value and returns a
list.
When render() does its HTML rendering, each value in the list is rendered
with the corresponding widget -- the first value is rendered in the first
widget, the second value is rendered in the second widget, etc.
Subclasses may implement format_output(), which takes the list of rendered
widgets and returns a string of HTML that formats them any way you'd like.
You'll probably want to use this class with MultiValueField.
"""
def __init__(self, widgets, attrs=None):
        self.widgets = [w() if isinstance(w, type) else w for w in widgets]
super(MultiWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
# value is a list of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, list):
value = self.decompress(value)
output = []
final_attrs = self.build_attrs(attrs)
id_ = final_attrs.get('id', None)
for i, widget in enumerate(self.widgets):
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
return mark_safe(self.format_output(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
id_for_label = classmethod(id_for_label)
def value_from_datadict(self, data, files, name):
return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]
def _has_changed(self, initial, data):
if initial is None:
initial = [u'' for x in range(0, len(data))]
else:
if not isinstance(initial, list):
initial = self.decompress(initial)
for widget, initial, data in zip(self.widgets, initial, data):
if widget._has_changed(initial, data):
return True
return False
def format_output(self, rendered_widgets):
"""
Given a list of rendered widgets (as strings), returns a Unicode string
representing the HTML for the whole lot.
This hook allows you to format the HTML design of the widgets, if
needed.
"""
return u''.join(rendered_widgets)
def decompress(self, value):
"""
Returns a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError('Subclasses must implement this method.')
def _get_media(self):
"Media for a multiwidget is the combination of all media of the subwidgets"
media = Media()
for w in self.widgets:
media = media + w.media
return media
media = property(_get_media)
def __deepcopy__(self, memo):
obj = super(MultiWidget, self).__deepcopy__(memo)
obj.widgets = copy.deepcopy(self.widgets)
return obj
class SplitDateTimeWidget(MultiWidget):
"""
A Widget that splits datetime input into two <input type="text"> boxes.
"""
date_format = DateInput.format
time_format = TimeInput.format
def __init__(self, attrs=None, date_format=None, time_format=None):
widgets = (DateInput(attrs=attrs, format=date_format),
TimeInput(attrs=attrs, format=time_format))
super(SplitDateTimeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
return [value.date(), value.time().replace(microsecond=0)]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A Widget that splits datetime input into two <input type="hidden"> inputs.
"""
is_hidden = True
def __init__(self, attrs=None, date_format=None, time_format=None):
super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format, time_format)
for widget in self.widgets:
widget.input_type = 'hidden'
widget.is_hidden = True
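# --- Example (editor's sketch, not part of Django) --------------------------
# A hypothetical MultiWidget subclass showing how decompress() splits a
# "compressed" value into one value per sub-widget. The class name and the
# "lat,lon" value format are illustrative assumptions; TextInput is assumed
# to be defined earlier in this module.
class CoordinateWidget(MultiWidget):
    def __init__(self, attrs=None):
        widgets = (TextInput(attrs=attrs), TextInput(attrs=attrs))
        super(CoordinateWidget, self).__init__(widgets, attrs)
    def decompress(self, value):
        # u"51.5,-0.1" -> [u"51.5", u"-0.1"]; missing value -> [None, None]
        if value:
            return value.split(',')
        return [None, None]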
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import glob
import re
import csv
import sys
def check_that_dict_has_equal_length(resultDict):
    # Verify that every query's per-tick result list has the same length.
i = 0
for key, value in resultDict.iteritems():
if i == 0:
length = len(value)
if not length == len(value):
print("Key of unequal length: " + key)
return False
i = i + 1
return True
def read_query_of_runid(path, runName, runId, queryName, resultDict):
filename = runId + "-" + queryName
filepath = path + runName + "/" + filename
json_data = open(filepath, 'r')
resultList = []
line = json_data.readline()
jsonLine = json.loads(line[:-2], encoding="ascii")
    # In case it's a pure list of numbers or strings
if len(jsonLine) == 1 and \
isinstance(jsonLine[0], (int, float, long, complex, basestring)):
while line:
resultList.append(json.loads(line[:-2], encoding="ascii")[0])
line = json_data.readline()
resultDict[queryName] = resultList
return resultDict
wrapperCounter = 0
tempJsonLine = jsonLine
    # Unwrap the JSON lists, in case the payload is wrapped
while (tempJsonLine is not None) and len(tempJsonLine) == 1 and \
isinstance(tempJsonLine, (list)) and \
(not isinstance(tempJsonLine[0][0], (basestring))):
tempJsonLine = tempJsonLine[0]
wrapperCounter = wrapperCounter + 1
#Checking if it conforms to the standard and starting the dictionary.
if tempJsonLine is None:
return None
elif len(tempJsonLine[0]) == 2:
return read_standard_conform_key_value_pairs(tempJsonLine,
wrapperCounter, runId, queryName, json_data, resultDict)
else:
#print("Non-Standard: " + queryName)
raise NameError("Query returns in " + runId + " ," + queryName
+ " are not standard [Name, Value] for each tick!")
#return read_nonstandard_conform_key_value_pairs(tempJsonLine,\
# wrapperCounter,runId,queryName,json_data,resultDict)
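# Illustrative sketch (editor's note) of the per-tick line format the parser
# above assumes, inferred from the code rather than from real output files:
# every line is a JSON list terminated by ",\n" (hence line[:-2]), holding
# either a single bare value, e.g.
#   [42],
# or (possibly wrapped) [Name, Value] pairs, e.g.
#   [[["wolves", 12], ["sheep", 30]]],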
def read_nonstandard_conform_key_value_pairs(tempJsonLine, wrapperCounter,
runId, queryName, json_data, resultDict):
subElementNumber = 0
for subelement in tempJsonLine:
for subsubelement in subelement:
if not len(subsubelement) == 2:
print(wrapperCounter)
print(subsubelement)
raise NameError("Query returns in " + runId + " ,"
+ queryName + " are not [Name, Value] for each tick!")
for content in subsubelement:
if isinstance(content, list):
raise NameError("Query returns in " + runId + " ,"
+ queryName + " are not [Name, Value] for each tick!")
resultDict[(queryName + "_"
+ subsubelement[0]).encode("ascii")] = [subsubelement[1]]
subElementNumber = subElementNumber + 1
#Reading the rest of the file
line = json_data.readline()
while line:
counter = 0
tempJsonLine = json.loads(line[:-2], encoding="ascii")
while counter < wrapperCounter:
tempJsonLine = tempJsonLine[0]
counter = counter + 1
subElementCounter = 0
for subelement in tempJsonLine:
for subsubelement in subelement:
if not len(subsubelement) == 2:
raise NameError("Query returns should be \
[Name, Value] for each tick!")
for content in subsubelement:
if isinstance(content, list):
raise NameError("Query returns should be \
[Name, Value] for each tick!")
resultDict[(queryName + "_"
+ subsubelement[0]).encode("ascii")].append(subsubelement[1])
subElementCounter = subElementCounter + 1
if not subElementCounter == subElementNumber:
raise NameError("Number of subresults is not consistent!:" + queryName + ", " + ''.join(str(e) for e in tempJsonLine))
line = json_data.readline()
#print(resultDict)
return resultDict
def read_standard_conform_key_value_pairs(tempJsonLine, wrapperCounter, runId,
queryName, json_data, resultDict):
subElementNumber = 0
for subelement in tempJsonLine:
if not len(subelement) == 2:
print(wrapperCounter)
print(subelement)
raise NameError("Query returns in " + runId + " ," + queryName
+ " are not [Name, Value] for each tick!")
for content in subelement:
if isinstance(content, list):
raise NameError("Query returns in " + runId + " ," + queryName
+ " are not [Name, Value] for each tick!")
resultDict[(queryName + "_"
+ subelement[0]).encode("ascii")] = [subelement[1]]
subElementNumber = subElementNumber + 1
#Reading the rest of the file
line = json_data.readline()
while line:
counter = 0
tempJsonLine = json.loads(line[:-2], encoding="ascii")
while counter < wrapperCounter:
tempJsonLine = tempJsonLine[0]
counter = counter + 1
subElementCounter = 0
for subelement in tempJsonLine:
if not len(subelement) == 2:
raise NameError("Query returns should be [Name, Value] for \
each tick!")
for content in subelement:
if isinstance(content, list):
raise NameError("Query returns should be [Name, Value]\
for each tick!")
resultDict[(queryName + "_" +
subelement[0]).encode("ascii")].append(subelement[1])
subElementCounter = subElementCounter + 1
if not subElementCounter == subElementNumber:
raise NameError("Number of subresults is not consistent! " + queryName + ", " + ''.join(str(e) for e in tempJsonLine))
line = json_data.readline()
#print(resultDict)
return resultDict
def find_query_names_in_directory_for_runId(path, runName, runId):
listOfQueryPaths = glob.glob(path + runName + "/" + runId + "-*")
listOfQueries = []
listOfTableQueries = []
for query in listOfQueryPaths:
m = re.search('(?<={0}{1}/{2}-).*'.format(path, runName, runId), query)
if not m.group(0).startswith("TABLE_"):
listOfQueries.append(m.group(0))
else:
listOfTableQueries.append(m.group(0))
listOfQueries.sort()
listOfQueryPaths.sort()
#return listOfQueries,listOfQueryPaths
return listOfQueries, listOfTableQueries
def find_runIds_based_on_logfiles_and_runname(path, runName):
listOfQueryPaths = glob.glob(path + runName + "/*.log")
listOfRunIds = []
for query in listOfQueryPaths:
m = re.search('(?<={0}{1}/).*'.format(path, runName), query)
n = re.sub("^(.*).log$", "\\1", m.group(0))
listOfRunIds.append(n)
return listOfRunIds
def read_runId_to_dictionary(path, runName, runId, ignoredQueries):
print("Reading " + runId)
queryNames, tableQueryNames = find_query_names_in_directory_for_runId(path,
runName, runId)
for ignoredQuery in ignoredQueries:
queryNames.remove(ignoredQuery)
resultDict = {}
for queryName in queryNames:
singleQueryResult = read_query_of_runid(path, runName, runId,
queryName, resultDict)
if singleQueryResult is not None:
resultDict.update(singleQueryResult)
if not check_that_dict_has_equal_length(resultDict):
raise NameError("Results of uneven time step length in ." + runName)
return resultDict
def write_first_runid_dictionary_to_csv(path, runName, runId,
resultDict, noOfTicks):
queryNames = ["tick", "runId"]
queryNames = set(queryNames)
queryNames.update(resultDict.keys())
with open(path + runName + ".csv", 'w') as csvfile:
csvwriter = csv.DictWriter(csvfile, fieldnames=queryNames)
headers = {}
for n in csvwriter.fieldnames:
headers[n] = n
csvwriter.writerow(headers)
i = 0
while i < noOfTicks:
singleTickDict = {}
for key, value in resultDict.iteritems():
singleTickDict.update({key: value[i]})
singleTickDict.update({"tick": i})
singleTickDict.update({"runId": runId})
csvwriter.writerow(singleTickDict)
i = i + 1
def write_following_runids_to_csv(path, runName, runId, resultDict, noOfTicks):
queryNames = {}
with open(path + runName + ".csv", 'rb') as csvtoupdate:
dictReader = csv.DictReader(csvtoupdate)
queryNames = dictReader.fieldnames
with open(path + runName + ".csv", 'a') as csvfile:
csvwriter = csv.DictWriter(csvfile, fieldnames=queryNames)
i = 0
while i < noOfTicks:
singleTickDict = {}
for key, value in resultDict.iteritems():
singleTickDict.update({key: value[i]})
singleTickDict.update({"tick": i})
singleTickDict.update({"runId": runId})
csvwriter.writerow(singleTickDict)
i = i + 1
def write_csv_for_run_name(path, runName, ignoredQueries):
runIds = find_runIds_based_on_logfiles_and_runname(path, runName)
totalRunIdNo = len(runIds)
j = 0
for runId in runIds:
resultDict = read_runId_to_dictionary(path, runName,
runId, ignoredQueries)
        # All per-tick lists have equal length (checked above); take the
        # length of one entry as the number of ticks.
        noOfTicks = len(resultDict.items()[1][1])
if j == 0:
write_first_runid_dictionary_to_csv(path, runName, runId,
resultDict, noOfTicks)
else:
write_following_runids_to_csv(path, runName, runId,
resultDict, noOfTicks)
j = j + 1
percentage = 1.0 * j / totalRunIdNo * 100
print((str(percentage) + "% done."))
def main(outputPath, runName, ignoredQueries):
    if not outputPath.endswith("/"):
        outputPath = outputPath + "/"
print((find_runIds_based_on_logfiles_and_runname(outputPath, runName)))
write_csv_for_run_name(outputPath, runName, ignoredQueries)
if __name__ == "__main__":
if len(sys.argv[1:]) > 2:
main(sys.argv[1], sys.argv[2], sys.argv[3:])
elif len(sys.argv[1:]) == 2:
        print(sys.argv[1], sys.argv[2])
main(sys.argv[1], sys.argv[2], [])
    elif len(sys.argv[1:]) == 1:
        # Single argument: a path like "outputPath/runName/".
        # Derive outputPath and runName from it.
        path = sys.argv[1].rstrip("/")
        runName = path.split("/")[-1]
        outputPath = path[:-len(runName)]
        print(outputPath, runName)
        main(outputPath, runName, [])
else:
print("This script needs to be called with: outputPath, \
runName (, ignoredQueries)")
|
import copy
import torch
import warnings
from torch import nn
import torch.nn.functional as F
try:
from pytorch_quantization import nn as quant_nn
except ImportError as e:
warnings.warn(
"pytorch_quantization module not found, quantization will not be available"
)
quant_nn = None
# SqueezeAndExcitation {{{
class SqueezeAndExcitation(nn.Module):
def __init__(self, in_channels, squeeze, activation):
super(SqueezeAndExcitation, self).__init__()
self.squeeze = nn.Linear(in_channels, squeeze)
self.expand = nn.Linear(squeeze, in_channels)
self.activation = activation
self.sigmoid = nn.Sigmoid()
def forward(self, x):
return self._attention(x)
def _attention(self, x):
out = torch.mean(x, [2, 3])
out = self.squeeze(out)
out = self.activation(out)
out = self.expand(out)
out = self.sigmoid(out)
out = out.unsqueeze(2).unsqueeze(3)
return out
class SqueezeAndExcitationTRT(nn.Module):
def __init__(self, in_channels, squeeze, activation):
super(SqueezeAndExcitationTRT, self).__init__()
self.pooling = nn.AdaptiveAvgPool2d(1)
self.squeeze = nn.Conv2d(in_channels, squeeze, 1)
self.expand = nn.Conv2d(squeeze, in_channels, 1)
self.activation = activation
self.sigmoid = nn.Sigmoid()
def forward(self, x):
return self._attention(x)
def _attention(self, x):
out = self.pooling(x)
out = self.squeeze(out)
out = self.activation(out)
out = self.expand(out)
out = self.sigmoid(out)
return out
# }}}
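# Editor's sketch (not part of the original file): how the squeeze-and-
# excitation modules above are typically applied to a feature map. Shapes
# and sizes are illustrative assumptions.
#
#   se = SqueezeAndExcitation(in_channels=64, squeeze=16, activation=nn.ReLU())
#   x = torch.randn(8, 64, 32, 32)   # (N, C, H, W)
#   y = x * se(x)                    # se(x) broadcasts from shape (N, C, 1, 1)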
# EMA {{{
class EMA:
def __init__(self, mu, module_ema):
self.mu = mu
self.module_ema = module_ema
def __call__(self, module, step=None):
if step is None:
mu = self.mu
else:
mu = min(self.mu, (1.0 + step) / (10 + step))
        def strip_module(s: str) -> str:
            # Hook for stripping a wrapper prefix such as DataParallel's
            # "module." from parameter names; identity by default.
            return s
mesd = self.module_ema.state_dict()
with torch.no_grad():
for name, x in module.state_dict().items():
if name.endswith("num_batches_tracked"):
continue
n = strip_module(name)
mesd[n].mul_(mu)
mesd[n].add_((1.0 - mu) * x)
# }}}
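# Editor's sketch (not part of the original file): typical EMA usage; the
# model and step names are illustrative assumptions.
#
#   model = nn.Linear(10, 10)
#   model_ema = copy.deepcopy(model)
#   ema = EMA(mu=0.999, module_ema=model_ema)
#   # after each optimizer step:
#   ema(model, step=global_step)   # blends model weights into model_ema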
# ONNXSiLU {{{
# Since torch.nn.SiLU is not supported in ONNX,
# this implementation must be used in exported models (it needs 15-20% more GPU memory)
class ONNXSiLU(nn.Module):
def __init__(self, *args, **kwargs):
super(ONNXSiLU, self).__init__()
def forward(self, x):
return x * torch.sigmoid(x)
# }}}
class SequentialSqueezeAndExcitation(SqueezeAndExcitation):
def __init__(self, in_channels, squeeze, activation, quantized=False):
super().__init__(in_channels, squeeze, activation)
self.quantized = quantized
if quantized:
assert quant_nn is not None, "pytorch_quantization is not available"
self.mul_a_quantizer = quant_nn.TensorQuantizer(
quant_nn.QuantConv2d.default_quant_desc_input
)
self.mul_b_quantizer = quant_nn.TensorQuantizer(
quant_nn.QuantConv2d.default_quant_desc_input
)
else:
self.mul_a_quantizer = nn.Identity()
self.mul_b_quantizer = nn.Identity()
def forward(self, x):
out = self._attention(x)
if not self.quantized:
return out * x
else:
x_quant = self.mul_a_quantizer(out)
return x_quant * self.mul_b_quantizer(x)
class SequentialSqueezeAndExcitationTRT(SqueezeAndExcitationTRT):
def __init__(self, in_channels, squeeze, activation, quantized=False):
super().__init__(in_channels, squeeze, activation)
self.quantized = quantized
if quantized:
assert quant_nn is not None, "pytorch_quantization is not available"
self.mul_a_quantizer = quant_nn.TensorQuantizer(
quant_nn.QuantConv2d.default_quant_desc_input
)
self.mul_b_quantizer = quant_nn.TensorQuantizer(
quant_nn.QuantConv2d.default_quant_desc_input
)
else:
self.mul_a_quantizer = nn.Identity()
self.mul_b_quantizer = nn.Identity()
def forward(self, x):
out = self._attention(x)
if not self.quantized:
return out * x
else:
x_quant = self.mul_a_quantizer(out)
return x_quant * self.mul_b_quantizer(x)
class StochasticDepthResidual(nn.Module):
def __init__(self, survival_prob: float):
super().__init__()
self.survival_prob = survival_prob
self.register_buffer("mask", torch.ones(()), persistent=False)
    def forward(self, residual: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
        if not self.training:
            return torch.add(residual, other=x)
        else:
            with torch.no_grad():
                # F.dropout is not in-place here, so its result must be
                # captured; discarding it would leave the mask all ones and
                # disable stochastic depth.
                mask = F.dropout(
                    self.mask,
                    p=1 - self.survival_prob,
                    training=self.training,
                    inplace=False,
                )
            return torch.addcmul(residual, mask, x)
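# Editor's sketch (not part of the original file): stochastic depth wraps a
# residual connection and randomly drops the block's contribution during
# training. `block` is an illustrative assumption.
#
#   sd = StochasticDepthResidual(survival_prob=0.8)
#   y = sd(x, block(x))   # training: x + mask * block(x), mask in {0, 1/0.8}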
class Flatten(nn.Module):
def forward(self, x: torch.Tensor) -> torch.Tensor:
return x.squeeze(-1).squeeze(-1)
|
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from perfkitbenchmarker import flags
flags.DEFINE_string('openstack_auth_url',
os.environ.get('OS_AUTH_URL', 'http://localhost:5000'),
                    ('URL for the Keystone authentication service, defaults to '
'$OS_AUTH_URL. Required for discovery of other OpenStack '
'service URLs.'))
flags.DEFINE_string('openstack_username',
os.getenv('OS_USERNAME', 'admin'),
'OpenStack login username, defaults to $OS_USERNAME.')
flags.DEFINE_string('openstack_tenant',
os.getenv('OS_TENANT_NAME', 'admin'),
'OpenStack tenant name, defaults to $OS_TENANT_NAME.')
flags.DEFINE_string('openstack_password_file',
os.getenv('OPENSTACK_PASSWORD_FILE',
'~/.config/openstack-password.txt'),
'Path to file containing the openstack password, '
'defaults to $OPENSTACK_PASSWORD_FILE. Alternatively, '
'setting the password itself in $OS_PASSWORD is also '
'supported.')
flags.DEFINE_string('openstack_nova_endpoint_type',
os.getenv('NOVA_ENDPOINT_TYPE', 'publicURL'),
'OpenStack Nova endpoint type, '
'defaults to $NOVA_ENDPOINT_TYPE.')
flags.DEFINE_string('openstack_public_network', None,
'Name of OpenStack public network')
flags.DEFINE_string('openstack_private_network', 'private',
'Name of OpenStack private network')
flags.DEFINE_boolean('openstack_config_drive', False,
                     'Enable config drive to provide metadata from an attached drive')
flags.DEFINE_boolean('openstack_boot_from_volume', False,
'Boot from volume instead of an image')
flags.DEFINE_integer('openstack_volume_size', None,
'Size of the volume (GB)')
NONE = 'None'
flags.DEFINE_enum('openstack_scheduler_policy', NONE,
[NONE, 'affinity', 'anti-affinity'],
                  'Enable use of an affinity or anti-affinity '
                  'policy in the scheduling process')
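# Example invocation (editor's sketch; the benchmark name and flag values are
# illustrative assumptions, not part of this module):
#   ./pkb.py --cloud=OpenStack --benchmarks=ping \
#       --openstack_auth_url=http://keystone.example:5000 \
#       --openstack_public_network=public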
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GoGrid driver
"""
import time
import hashlib
import copy
from libcloud.utils.py3 import b
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.gogrid import GoGridConnection, BaseGoGridDriver
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver
from libcloud.compute.base import NodeSize, NodeImage, NodeLocation
STATE = {
"Starting": NodeState.PENDING,
"On": NodeState.RUNNING,
"On/Saving": NodeState.RUNNING,
"Off": NodeState.PENDING,
"Restarting": NodeState.REBOOTING,
"Saving": NodeState.PENDING,
"Restoring": NodeState.PENDING,
}
GOGRID_INSTANCE_TYPES = {
'512MB': {'id': '512MB',
'name': '512MB',
'ram': 512,
'disk': 30,
'bandwidth': None},
'1GB': {'id': '1GB',
'name': '1GB',
'ram': 1024,
'disk': 60,
'bandwidth': None},
'2GB': {'id': '2GB',
'name': '2GB',
'ram': 2048,
'disk': 120,
'bandwidth': None},
'4GB': {'id': '4GB',
'name': '4GB',
'ram': 4096,
'disk': 240,
'bandwidth': None},
'8GB': {'id': '8GB',
'name': '8GB',
'ram': 8192,
'disk': 480,
'bandwidth': None},
'16GB': {'id': '16GB',
'name': '16GB',
'ram': 16384,
'disk': 960,
'bandwidth': None},
'24GB': {'id': '24GB',
'name': '24GB',
'ram': 24576,
'disk': 960,
'bandwidth': None},
}
class GoGridNode(Node):
# Generating uuid based on public ip to get around missing id on
# create_node in gogrid api
#
# Used public ip since it is not mutable and specified at create time,
# so uuid of node should not change after add is completed
def get_uuid(self):
return hashlib.sha1(
b("%s:%s" % (self.public_ips, self.driver.type))
).hexdigest()
class GoGridNodeDriver(BaseGoGridDriver, NodeDriver):
"""
GoGrid node driver
"""
connectionCls = GoGridConnection
type = Provider.GOGRID
api_name = 'gogrid'
name = 'GoGrid'
website = 'http://www.gogrid.com/'
features = {"create_node": ["generates_password"]}
_instance_types = GOGRID_INSTANCE_TYPES
def __init__(self, *args, **kwargs):
"""
@inherits: :class:`NodeDriver.__init__`
"""
super(GoGridNodeDriver, self).__init__(*args, **kwargs)
def _get_state(self, element):
try:
return STATE[element['state']['name']]
except Exception:
pass
return NodeState.UNKNOWN
def _get_ip(self, element):
return element.get('ip').get('ip')
def _get_id(self, element):
return element.get('id')
def _to_node(self, element, password=None):
state = self._get_state(element)
ip = self._get_ip(element)
id = self._get_id(element)
n = GoGridNode(id=id,
name=element['name'],
state=state,
public_ips=[ip],
private_ips=[],
extra={'ram': element.get('ram').get('name'),
'description': element.get('description', '')},
driver=self.connection.driver)
if password:
n.extra['password'] = password
return n
def _to_image(self, element):
n = NodeImage(id=element['id'],
name=element['friendlyName'],
driver=self.connection.driver)
return n
def _to_images(self, object):
return [self._to_image(el)
for el in object['list']]
def _to_location(self, element):
location = NodeLocation(id=element['id'],
name=element['name'],
country="US",
driver=self.connection.driver)
return location
def _to_locations(self, object):
return [self._to_location(el)
for el in object['list']]
def list_images(self, location=None):
params = {}
if location is not None:
params["datacenter"] = location.id
images = self._to_images(
self.connection.request('/api/grid/image/list', params).object)
return images
def list_nodes(self):
"""
@inherits: :class:`NodeDriver.list_nodes`
:rtype: ``list`` of :class:`GoGridNode`
"""
passwords_map = {}
res = self._server_list()
try:
for password in self._password_list()['list']:
try:
passwords_map[password['server']['id']] = \
password['password']
except KeyError:
pass
except InvalidCredsError:
# some gogrid API keys don't have permission to access the
# password list.
pass
return [self._to_node(el, passwords_map.get(el.get('id')))
for el in res['list']]
def reboot_node(self, node):
"""
@inherits: :class:`NodeDriver.reboot_node`
:type node: :class:`GoGridNode`
"""
id = node.id
power = 'restart'
res = self._server_power(id, power)
if not res.success():
raise Exception(res.parse_error())
return True
def destroy_node(self, node):
"""
@inherits: :class:`NodeDriver.reboot_node`
:type node: :class:`GoGridNode`
"""
id = node.id
res = self._server_delete(id)
if not res.success():
raise Exception(res.parse_error())
return True
def _server_list(self):
return self.connection.request('/api/grid/server/list').object
def _password_list(self):
return self.connection.request('/api/support/password/list').object
def _server_power(self, id, power):
# power in ['start', 'stop', 'restart']
params = {'id': id, 'power': power}
return self.connection.request("/api/grid/server/power", params,
method='POST')
def _server_delete(self, id):
params = {'id': id}
return self.connection.request("/api/grid/server/delete", params,
method='POST')
def _get_first_ip(self, location=None):
ips = self.ex_list_ips(public=True, assigned=False, location=location)
try:
return ips[0].ip
except IndexError:
raise LibcloudError('No public unassigned IPs left',
GoGridNodeDriver)
def list_sizes(self, location=None):
sizes = []
for key, values in self._instance_types.items():
attributes = copy.deepcopy(values)
attributes.update({'price': self._get_size_price(size_id=key)})
sizes.append(NodeSize(driver=self.connection.driver, **attributes))
return sizes
def list_locations(self):
locations = self._to_locations(
self.connection.request('/api/common/lookup/list',
params={'lookup': 'ip.datacenter'}).object)
return locations
def ex_create_node_nowait(self, **kwargs):
"""Don't block until GoGrid allocates id for a node
but return right away with id == None.
The existence of this method is explained by the fact
that GoGrid assigns id to a node only few minutes after
creation.
:keyword name: String with a name for this new node (required)
:type name: ``str``
        :keyword size: The size of resources allocated to this node.
                       (required)
:type size: :class:`NodeSize`
:keyword image: OS Image to boot on node. (required)
:type image: :class:`NodeImage`
:keyword ex_description: Description of a Node
:type ex_description: ``str``
:keyword ex_ip: Public IP address to use for a Node. If not
specified, first available IP address will be picked
:type ex_ip: ``str``
:rtype: :class:`GoGridNode`
"""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
try:
ip = kwargs['ex_ip']
except KeyError:
ip = self._get_first_ip(kwargs.get('location'))
params = {'name': name,
'image': image.id,
'description': kwargs.get('ex_description', ''),
'server.ram': size.id,
'ip': ip}
object = self.connection.request('/api/grid/server/add',
params=params, method='POST').object
node = self._to_node(object['list'][0])
return node
def create_node(self, **kwargs):
"""Create a new GoGird node
@inherits: :class:`NodeDriver.create_node`
:keyword ex_description: Description of a Node
:type ex_description: ``str``
:keyword ex_ip: Public IP address to use for a Node. If not
specified, first available IP address will be picked
:type ex_ip: ``str``
:rtype: :class:`GoGridNode`
"""
node = self.ex_create_node_nowait(**kwargs)
timeout = 60 * 20
waittime = 0
interval = 2 * 60
while node.id is None and waittime < timeout:
nodes = self.list_nodes()
for i in nodes:
if i.public_ips[0] == node.public_ips[0] and i.id is not None:
return i
waittime += interval
time.sleep(interval)
        if node.id is None:
raise Exception(
"Wasn't able to wait for id allocation for the node %s"
% str(node))
return node
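    # Editor's sketch (not part of the driver): typical create_node usage;
    # the credentials and selections below are illustrative assumptions.
    #
    #   driver = GoGridNodeDriver('api key', 'secret')
    #   size = driver.list_sizes()[0]
    #   image = driver.list_images()[0]
    #   node = driver.create_node(name='test-node', size=size, image=image)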
def ex_save_image(self, node, name):
"""Create an image for node.
        Please refer to the GoGrid documentation for information on
        how to prepare a node for image creation:
        http://wiki.gogrid.com/wiki/index.php/MyGSI
:keyword node: node to use as a base for image
:type node: :class:`GoGridNode`
:keyword name: name for new image
:type name: ``str``
:rtype: :class:`NodeImage`
"""
params = {'server': node.id,
'friendlyName': name}
object = self.connection.request('/api/grid/image/save', params=params,
method='POST').object
return self._to_images(object)[0]
def ex_edit_node(self, **kwargs):
"""Change attributes of a node.
:keyword node: node to be edited (required)
:type node: :class:`GoGridNode`
:keyword size: new size of a node (required)
:type size: :class:`NodeSize`
:keyword ex_description: new description of a node
:type ex_description: ``str``
:rtype: :class:`Node`
"""
node = kwargs['node']
size = kwargs['size']
params = {'id': node.id,
'server.ram': size.id}
if 'ex_description' in kwargs:
params['description'] = kwargs['ex_description']
object = self.connection.request('/api/grid/server/edit',
params=params).object
return self._to_node(object['list'][0])
def ex_edit_image(self, **kwargs):
"""Edit metadata of a server image.
:keyword image: image to be edited (required)
:type image: :class:`NodeImage`
:keyword public: should be the image public (required)
:type public: ``bool``
:keyword ex_description: description of the image (optional)
:type ex_description: ``str``
:keyword name: name of the image
:type name: ``str``
:rtype: :class:`NodeImage`
"""
image = kwargs['image']
public = kwargs['public']
params = {'id': image.id,
'isPublic': str(public).lower()}
if 'ex_description' in kwargs:
params['description'] = kwargs['ex_description']
if 'name' in kwargs:
params['friendlyName'] = kwargs['name']
object = self.connection.request('/api/grid/image/edit',
params=params).object
return self._to_image(object['list'][0])
def ex_list_ips(self, **kwargs):
"""Return list of IP addresses assigned to
the account.
        :keyword public: set to True to list only
                         public IPs or False to list only
                         private IPs. Set to None or omit
                         to avoid filtering by type
        :type public: ``bool``
        :keyword assigned: set to True to list only addresses
                           assigned to servers, False to list unassigned
                           addresses. Set to None or omit
                           to avoid filtering by state
        :type assigned: ``bool``
:keyword location: filter IP addresses by location
:type location: :class:`NodeLocation`
:rtype: ``list`` of :class:`GoGridIpAddress`
"""
params = {}
if "public" in kwargs and kwargs["public"] is not None:
params["ip.type"] = {True: "Public",
False: "Private"}[kwargs["public"]]
if "assigned" in kwargs and kwargs["assigned"] is not None:
params["ip.state"] = {True: "Assigned",
False: "Unassigned"}[kwargs["assigned"]]
if "location" in kwargs and kwargs['location'] is not None:
params['datacenter'] = kwargs['location'].id
ips = self._to_ips(
self.connection.request('/api/grid/ip/list',
params=params).object)
return ips
|
#!/usr/bin/env python
# encoding: utf-8
"""Separate a path into its directory and base components.
"""
import os.path
for path in ['/one/two/three',
'/one/two/three/',
'/',
'.',
'']:
print '"%s" : "%s"' % (path, os.path.split(path))
|
import unittest
from BST.BasicBst import BasicBst
class TestBasicBst(unittest.TestCase):
def test_should_add_first_node_as_root(self):
bst = BasicBst()
bst[55] = 'this is cool'
bst[10] = 'test'
self.assertEqual(bst.root.value, 'this is cool')
def test_should_have_size_of_three(self):
bst = BasicBst()
bst[55] = 'this is cool'
bst[10] = 'test'
bst[15] = 'another test'
self.assertEqual(bst.size, 3)
def test_should_put_smaller_keys_on_left(self):
bst = BasicBst()
bst[55] = 'this is cool'
bst[10] = 'test'
self.assertIsNotNone(bst.root.left_child)
self.assertEqual(bst.root.left_child.key, 10)
def test_should_put_bigger_keys_on_right(self):
bst = BasicBst()
bst[50] = 'this is cool'
bst[55] = 'test'
self.assertEqual(bst.root.right_child.key, 55)
def test_should_get_node_by_key(self):
bst = BasicBst()
bst[55] = 'this is cool'
bst[10] = 'test'
bst[15] = 'another test'
self.assertIsNotNone(bst.get(15))
self.assertEqual(bst.get(15).value, 'another test')
def test_contains_some_key_should_return_true(self):
bst = BasicBst()
bst[55] = 'this is cool'
bst[10] = 'test'
bst[15] = 'another test'
self.assertTrue(15 in bst)
def test_not_contains(self):
bst = BasicBst()
bst[55] = 'this is cool'
bst[10] = 'test'
bst[15] = 'another test'
self.assertFalse(8 in bst)
# def test_delete_removes_node(self):
# bst = BasicBst()
# bst[55] = 'this is cool'
# bst[10] = 'test'
# bst[15] = 'another test'
def test_gets_successor_node(self):
bst = BasicBst()
bst[18] = 0
bst[5] = 0
bst[35] = 0
bst[2] = 0
bst[10] = 0
bst[4] = 0
bst[7] = 0
bst[8] = 0
self.assertEqual(bst.find_successor_node(bst.get(5)).key, 7)
def test_delete_node_with_no_children_and_is_root(self):
bst = BasicBst()
bst[18] = 0
bst.remove(bst[18])
self.assertIsNone(bst.root)
def test_delete_node_with_no_children_that_is_not_root(self):
bst = BasicBst()
bst[18] = 0
bst[16] = 0
bst[20] = 0
bst.remove(bst[16])
self.assertIsNone(bst.root.left_child)
def test_delete_node_with_one_child(self):
bst = BasicBst()
bst[18] = 0
bst[16] = 0
bst[20] = 0
bst[15] = 0
bst.remove(bst[16])
self.assertIsNone(bst[16])
self.assertEqual(bst.root.left_child.key, 15)
def test_delete_node_with_two_children(self):
bst = BasicBst()
bst[18] = 0
bst[5] = 0
bst[35] = 0
bst[2] = 0
bst[10] = 0
bst[4] = 0
bst[7] = 0
bst[8] = 0
bst.remove(bst[5])
self.assertEqual(bst.root.left_child.key, 7)
self.assertEqual(bst.root.left_child.left_child.key, 2)
        self.assertEqual(bst.root.left_child.right_child.key, 10)
    # three deletion possibilities
# one no children
# one child
# two children
|
import json
import re
from vkbottle.rule import FromMe
from vkbottle.user import Blueprint, Message
from idm_lp.database import Alias
from idm_lp.idm_api import IDMAPI, IDMException
from idm_lp.logger import logger_decorator
user = Blueprint(
name='aliases_blueprint'
)
async def send_signal(
message: Message,
message_text: str
):
try:
await IDMAPI.get_current().send_my_signal(
from_id=message.from_id, peer_id=message.peer_id,
conversation_message_id=message.conversation_message_id,
date=message.date, text=message_text, vk_message=json.loads(message.json())
)
except IDMException as ex:
await message.api.messages.send(
random_id=0,
peer_id=await message.api.user_id,
message=f"[IDM LP]\n⚠ Произошла ошибка при отправке сигнала на сервер IDM:\n💬 {ex}"
)
@user.on.message_handler(FromMe(), text=['<alias:alias> <signal>', '<alias:alias>', '<alias:alias>\n<signal>'])
@logger_decorator
async def duty_signal(message: Message, alias: Alias, **kwargs):
await send_signal(
message,
re.compile(alias.regexp, re.IGNORECASE).sub(
f".с {alias.command_to}", message.text
)
)
|
def display_strings(str_list, ch):
# Modify the code below
longest_length = 0
new_string_list = []
number_padding = len(str_list) - 1
# find longest string length
for string in str_list:
if len(string) > longest_length:
longest_length = len(string)
# move string to another list
# add number of spaces required at front, 'number_padding'
# and add number of characters at back accordingly 'number_character'
for string in str_list:
number_character = longest_length - len(string)
new_string = (' ' * number_padding) + string + (number_character * ch)
number_padding -= 1
new_string_list.append(new_string)
    # print the padded strings
for string in new_string_list:
print(string)
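# Editor's note: a worked example of the logic above.
#   display_strings(['a', 'bb'], '*')
# prints " a*" (one pad space, one fill char) and then "bb".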
|
import json
from pprint import pformat
import requests
from simplejson import JSONDecodeError
from pytezos.logging import logger
def urljoin(*args):
return "/".join(map(lambda x: str(x).strip('/'), args))
def gen_error_variants(error_id) -> list:
chunks = error_id.split('.')
variants = [error_id]
if len(chunks) > 1:
variants.append(chunks[-2])
if len(chunks) > 2:
variants.append('.'.join(chunks[2:]))
return variants
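# Editor's note, a worked example: for a Tezos-style id such as
# 'proto.alpha.gas_exhausted.operation' this returns
# ['proto.alpha.gas_exhausted.operation', 'gas_exhausted',
#  'gas_exhausted.operation'] -- the full id, its second-to-last chunk,
# and the tail after the two-chunk protocol prefix.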
class RpcError(Exception):
__handlers__ = {} # type: ignore
@classmethod
def __init_subclass__(cls, error_id=None, **kwargs):
super().__init_subclass__(**kwargs)
if isinstance(error_id, list):
for eid in error_id:
cls.__handlers__[eid] = cls
else:
assert error_id is not None
cls.__handlers__[error_id] = cls
@classmethod
def from_errors(cls, errors: list):
if not errors:
return RpcError('Unspecified error')
        # FIXME: Only the last error in the list is processed
error = errors[-1]
for key in gen_error_variants(error['id']):
if key in cls.__handlers__:
handler = cls.__handlers__[key]
return handler(error)
return RpcError(error)
@classmethod
def from_response(cls, res: requests.Response):
if res.headers.get('content-type') == 'application/json':
errors = res.json()
assert isinstance(errors, list)
return cls.from_errors(errors)
else:
return RpcError(res.text)
def __str__(self):
return pformat(self.args)
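# Editor's sketch (not part of pytezos): subclasses register themselves in
# RpcError.__handlers__ via __init_subclass__, keyed by error id; the id
# below is an illustrative assumption.
#
#   class GasExhaustedError(RpcError, error_id='gas_exhausted.operation'):
#       pass
#
#   # from_errors() matches via gen_error_variants(), so an error with id
#   # 'proto.alpha.gas_exhausted.operation' yields a GasExhaustedError.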
class RpcNode:
def __init__(self, uri):
self.uri = uri
self._session = requests.Session()
def __repr__(self):
res = [
super(RpcNode, self).__repr__(),
'\nNode address',
self.uri
]
return '\n'.join(res)
def request(self, method, path, **kwargs) -> requests.Response:
logger.debug('>>>>> %s %s\n%s', method, path, json.dumps(kwargs, indent=4))
res = self._session.request(
method=method,
url=urljoin(self.uri, path),
headers={
'content-type': 'application/json',
'user-agent': 'PyTezos'
},
**kwargs
)
if res.status_code == 404:
logger.debug('<<<<< %s\n%s', res.status_code, res.text)
raise RpcError(f'Not found: {path}')
elif res.status_code != 200:
logger.debug('<<<<< %s\n%s', res.status_code, pformat(res.text, indent=4))
raise RpcError.from_response(res)
logger.debug('<<<<< %s\n%s', res.status_code, json.dumps(res.json(), indent=4))
return res
def get(self, path, params=None, timeout=None):
return self.request('GET', path, params=params, timeout=timeout).json()
def post(self, path, params=None, json=None):
response = self.request('POST', path, params=params, json=json)
try:
return response.json()
except JSONDecodeError:
return response.text
def delete(self, path, params=None):
return self.request('DELETE', path, params=params).json()
def put(self, path, params=None):
return self.request('PUT', path, params=params).json()
class RpcMultiNode(RpcNode):
def __init__(self, uri):
super(RpcMultiNode, self).__init__(uri=uri)
if not isinstance(uri, list):
self.uri = [uri]
        self.nodes = [RpcNode(u) for u in self.uri]
self._next_i = 0
def __repr__(self):
res = [
super(RpcNode, self).__repr__(),
            '\nNode addresses',
*self.uri
]
return '\n'.join(res)
def request(self, method, path, **kwargs) -> requests.Response:
assert self._next_i < len(self.nodes)
res = self.nodes[self._next_i].request(method, path, **kwargs)
self._next_i = (self._next_i + 1) % len(self.nodes)
return res
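# Editor's sketch (not part of pytezos): RpcMultiNode rotates requests across
# its nodes round-robin; the URIs are illustrative assumptions.
#
#   rpc = RpcMultiNode(['https://node1.example', 'https://node2.example'])
#   rpc.get('/chains/main/blocks/head')   # served by the first node
#   rpc.get('/chains/main/blocks/head')   # served by the second node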
|
import sys
class AppTestCodecs:
spaceconfig = {
"usemodules": ['unicodedata', 'struct', 'binascii'],
}
def test_register_noncallable(self):
import _codecs
raises(TypeError, _codecs.register, 1)
def test_bigU_codecs(self):
u = u'\U00010001\U00020002\U00030003\U00040004\U00050005'
for encoding in ('utf-8', 'utf-16', 'utf-16-le', 'utf-16-be',
'utf-32', 'utf-32-le', 'utf-32-be',
'raw_unicode_escape',
'unicode_escape', 'unicode_internal'):
assert unicode(u.encode(encoding),encoding) == u
def test_ucs4(self):
x = u'\U00100000'
y = x.encode("raw-unicode-escape").decode("raw-unicode-escape")
assert x == y
def test_named_unicode(self):
assert unicode('\\N{SPACE}','unicode-escape') == u" "
raises( UnicodeDecodeError, unicode,'\\N{SPACE','unicode-escape')
raises( UnicodeDecodeError, unicode,'\\NSPACE}','unicode-escape')
raises( UnicodeDecodeError, unicode,'\\NSPACE','unicode-escape')
raises( UnicodeDecodeError, unicode,'\\N','unicode-escape')
assert unicode('\\N{SPACE}\\N{SPACE}','unicode-escape') == u" "
assert unicode('\\N{SPACE}a\\N{SPACE}','unicode-escape') == u" a "
assert "\\N{foo}xx".decode("unicode-escape", "ignore") == u"xx"
assert 1 <= len(u"\N{CJK UNIFIED IDEOGRAPH-20000}") <= 2
def test_literals(self):
raises(SyntaxError, eval, 'u\'\\Uffffffff\'')
def test_insecure_pickle(self):
import pickle
insecure = ["abc", "2 + 2", # not quoted
#"'abc' + 'def'", # not a single quoted string
"'abc", # quote is not closed
"'abc\"", # open quote and close quote don't match
"'abc' ?", # junk after close quote
"'\\'", # trailing backslash
# some tests of the quoting rules
#"'abc\"\''",
#"'\\\\a\'\'\'\\\'\\\\\''",
]
for s in insecure:
buf = "S" + s + "\012p0\012."
raises (ValueError, pickle.loads, buf)
def test_unicodedecodeerror(self):
assert str(UnicodeDecodeError(
"ascii", "g\xfcrk", 1, 2, "ouch")) == "'ascii' codec can't decode byte 0xfc in position 1: ouch"
assert str(UnicodeDecodeError(
"ascii", "g\xfcrk", 1, 3, "ouch")) == "'ascii' codec can't decode bytes in position 1-2: ouch"
def test_unicodetranslateerror(self):
import sys
assert str(UnicodeTranslateError(
u"g\xfcrk", 1, 2, "ouch"))== "can't translate character u'\\xfc' in position 1: ouch"
assert str(UnicodeTranslateError(
u"g\u0100rk", 1, 2, "ouch"))== "can't translate character u'\\u0100' in position 1: ouch"
assert str(UnicodeTranslateError(
u"g\uffffrk", 1, 2, "ouch"))== "can't translate character u'\\uffff' in position 1: ouch"
if sys.maxunicode > 0xffff and len(unichr(0x10000)) == 1:
assert str(UnicodeTranslateError(
u"g\U00010000rk", 1, 2, "ouch"))== "can't translate character u'\\U00010000' in position 1: ouch"
assert str(UnicodeTranslateError(
u"g\xfcrk", 1, 3, "ouch"))=="can't translate characters in position 1-2: ouch"
def test_unicodeencodeerror(self):
import sys
assert str(UnicodeEncodeError(
"ascii", u"g\xfcrk", 1, 2, "ouch"))=="'ascii' codec can't encode character u'\\xfc' in position 1: ouch"
assert str(UnicodeEncodeError(
"ascii", u"g\xfcrk", 1, 4, "ouch"))== "'ascii' codec can't encode characters in position 1-3: ouch"
assert str(UnicodeEncodeError(
"ascii", u"\xfcx", 0, 1, "ouch"))=="'ascii' codec can't encode character u'\\xfc' in position 0: ouch"
assert str(UnicodeEncodeError(
"ascii", u"\u0100x", 0, 1, "ouch"))=="'ascii' codec can't encode character u'\\u0100' in position 0: ouch"
assert str(UnicodeEncodeError(
"ascii", u"\uffffx", 0, 1, "ouch"))=="'ascii' codec can't encode character u'\\uffff' in position 0: ouch"
if sys.maxunicode > 0xffff and len(unichr(0x10000)) == 1:
assert str(UnicodeEncodeError(
"ascii", u"\U00010000x", 0, 1, "ouch")) =="'ascii' codec can't encode character u'\\U00010000' in position 0: ouch"
def test_indexerror(self):
test = "\\" # trailing backslash
raises (ValueError, test.decode,'string-escape')
def test_charmap_decode(self):
from _codecs import charmap_decode
import sys
assert charmap_decode('', 'strict', 'blablabla') == ('', 0)
assert charmap_decode('xxx') == ('xxx', 3)
assert charmap_decode('xxx', 'strict', {ord('x'): u'XX'}) == ('XXXXXX', 3)
map = tuple([unichr(i) for i in range(256)])
assert charmap_decode('xxx\xff', 'strict', map) == (u'xxx\xff', 4)
exc = raises(TypeError, charmap_decode, '\xff', "strict", {0xff: 'a'})
assert str(exc.value) == "character mapping must return integer, None or unicode"
raises(TypeError, charmap_decode, '\xff', "strict", {0xff: 0x110000})
assert (charmap_decode("\x00\x01\x02", "strict",
{0: 0x10FFFF, 1: ord('b'), 2: ord('c')}) ==
(u"\U0010FFFFbc", 3))
assert (charmap_decode("\x00\x01\x02", "strict",
{0: u'\U0010FFFF', 1: u'b', 2: u'c'}) ==
(u"\U0010FFFFbc", 3))
assert charmap_decode('\xff', "strict", {0xff: 0xd800}) == (u'\ud800', 1)
def test_escape_decode(self):
from _codecs import unicode_escape_decode as decode
assert decode('\\\x80') == (u'\\\x80', 2)
def test_escape_decode_errors(self):
from _codecs import escape_decode as decode
raises(ValueError, decode, br"\x")
raises(ValueError, decode, br"[\x]")
assert decode(br"[\x]\x", "ignore") == (b"[]", 6)
assert decode(br"[\x]\x", "replace") == (b"[?]?", 6)
raises(ValueError, decode, br"\x0")
raises(ValueError, decode, br"[\x0]")
assert decode(br"[\x0]\x0", "ignore") == (b"[]", 8)
assert decode(br"[\x0]\x0", "replace") == (b"[?]?", 8)
def test_unicode_escape(self):
from _codecs import unicode_escape_encode, unicode_escape_decode
assert unicode_escape_encode(u'abc') == (u'abc'.encode('unicode_escape'), 3)
assert unicode_escape_decode('abc') == (u'abc'.decode('unicode_escape'), 3)
assert unicode_escape_decode('\\x61\\x62\\x63') == (u'abc', 12)
def test_unicode_replace(self):
# CPython #8271: during the decoding of an invalid UTF-8 byte sequence,
# only the start byte and the continuation byte(s) are now considered
# invalid, instead of the number of bytes specified by the start byte.
# See http://www.unicode.org/versions/Unicode5.2.0/ch03.pdf (page 42,
# table 3-8, Row 2) for more information about the algorithm used.
FFFD = u'\ufffd'
sequences = [
# invalid start bytes
(b'\x80', FFFD), # continuation byte
(b'\x80\x80', FFFD*2), # 2 continuation bytes
(b'\xc0', FFFD),
(b'\xc0\xc0', FFFD*2),
(b'\xc1', FFFD),
(b'\xc1\xc0', FFFD*2),
(b'\xc0\xc1', FFFD*2),
# with start byte of a 2-byte sequence
(b'\xc2', FFFD), # only the start byte
(b'\xc2\xc2', FFFD*2), # 2 start bytes
(b'\xc2\xc2\xc2', FFFD*3), # 3 start bytes
(b'\xc2\x41', FFFD+'A'), # invalid continuation byte
# with start byte of a 3-byte sequence
(b'\xe1', FFFD), # only the start byte
(b'\xe1\xe1', FFFD*2), # 2 start bytes
(b'\xe1\xe1\xe1', FFFD*3), # 3 start bytes
(b'\xe1\xe1\xe1\xe1', FFFD*4), # 4 start bytes
(b'\xe1\x80', FFFD), # only 1 continuation byte
(b'\xe1\x41', FFFD+'A'), # invalid continuation byte
(b'\xe1\x41\x80', FFFD+'A'+FFFD), # invalid cb followed by valid cb
(b'\xe1\x41\x41', FFFD+'AA'), # 2 invalid continuation bytes
(b'\xe1\x80\x41', FFFD+'A'), # only 1 valid continuation byte
(b'\xe1\x80\xe1\x41', FFFD*2+'A'), # 1 valid and the other invalid
(b'\xe1\x41\xe1\x80', FFFD+'A'+FFFD), # 1 invalid and the other valid
# with start byte of a 4-byte sequence
(b'\xf1', FFFD), # only the start byte
(b'\xf1\xf1', FFFD*2), # 2 start bytes
(b'\xf1\xf1\xf1', FFFD*3), # 3 start bytes
(b'\xf1\xf1\xf1\xf1', FFFD*4), # 4 start bytes
(b'\xf1\xf1\xf1\xf1\xf1', FFFD*5), # 5 start bytes
(b'\xf1\x80', FFFD), # only 1 continuation bytes
(b'\xf1\x80\x80', FFFD), # only 2 continuation bytes
(b'\xf1\x80\x41', FFFD+'A'), # 1 valid cb and 1 invalid
(b'\xf1\x80\x41\x41', FFFD+'AA'), # 1 valid cb and 1 invalid
(b'\xf1\x80\x80\x41', FFFD+'A'), # 2 valid cb and 1 invalid
(b'\xf1\x41\x80', FFFD+'A'+FFFD), # 1 invalid cv and 1 valid
(b'\xf1\x41\x80\x80', FFFD+'A'+FFFD*2), # 1 invalid cb and 2 invalid
(b'\xf1\x41\x80\x41', FFFD+'A'+FFFD+'A'), # 2 invalid cb and 1 invalid
(b'\xf1\x41\x41\x80', FFFD+'AA'+FFFD), # 1 valid cb and 1 invalid
(b'\xf1\x41\xf1\x80', FFFD+'A'+FFFD),
(b'\xf1\x41\x80\xf1', FFFD+'A'+FFFD*2),
(b'\xf1\xf1\x80\x41', FFFD*2+'A'),
(b'\xf1\x41\xf1\xf1', FFFD+'A'+FFFD*2),
# with invalid start byte of a 4-byte sequence (rfc2279)
(b'\xf5', FFFD), # only the start byte
(b'\xf5\xf5', FFFD*2), # 2 start bytes
(b'\xf5\x80', FFFD*2), # only 1 continuation byte
(b'\xf5\x80\x80', FFFD*3), # only 2 continuation byte
(b'\xf5\x80\x80\x80', FFFD*4), # 3 continuation bytes
(b'\xf5\x80\x41', FFFD*2+'A'), # 1 valid cb and 1 invalid
(b'\xf5\x80\x41\xf5', FFFD*2+'A'+FFFD),
(b'\xf5\x41\x80\x80\x41', FFFD+'A'+FFFD*2+'A'),
# with invalid start byte of a 5-byte sequence (rfc2279)
(b'\xf8', FFFD), # only the start byte
(b'\xf8\xf8', FFFD*2), # 2 start bytes
(b'\xf8\x80', FFFD*2), # only one continuation byte
(b'\xf8\x80\x41', FFFD*2 + 'A'), # 1 valid cb and 1 invalid
(b'\xf8\x80\x80\x80\x80', FFFD*5), # invalid 5 bytes seq with 5 bytes
# with invalid start byte of a 6-byte sequence (rfc2279)
(b'\xfc', FFFD), # only the start byte
(b'\xfc\xfc', FFFD*2), # 2 start bytes
(b'\xfc\x80\x80', FFFD*3), # only 2 continuation bytes
(b'\xfc\x80\x80\x80\x80\x80', FFFD*6), # 6 continuation bytes
# invalid start byte
(b'\xfe', FFFD),
(b'\xfe\x80\x80', FFFD*3),
# other sequences
(b'\xf1\x80\x41\x42\x43', u'\ufffd\x41\x42\x43'),
(b'\xf1\x80\xff\x42\x43', u'\ufffd\ufffd\x42\x43'),
(b'\xf1\x80\xc2\x81\x43', u'\ufffd\x81\x43'),
(b'\x61\xF1\x80\x80\xE1\x80\xC2\x62\x80\x63\x80\xBF\x64',
u'\x61\uFFFD\uFFFD\uFFFD\x62\uFFFD\x63\uFFFD\uFFFD\x64'),
]
for n, (seq, res) in enumerate(sequences):
raises(UnicodeDecodeError, seq.decode, 'utf-8', 'strict')
uni = seq.decode('utf-8', 'replace')
assert uni == res
uni = (seq+b'b').decode('utf-8', 'replace')
assert uni == res+'b'
uni = seq.decode('utf-8', 'ignore')
assert uni == res.replace(u'\uFFFD', '')
class AppTestPartialEvaluation:
spaceconfig = dict(usemodules=['array',])
if sys.platform == 'win32':
spaceconfig['usemodules'].append('_winreg')
def test_partial_utf8(self):
import _codecs
encoding = 'utf-8'
check_partial = [
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u07ff",
u"\x00\xff\u07ff",
u"\x00\xff\u07ff",
u"\x00\xff\u07ff\u0800",
u"\x00\xff\u07ff\u0800",
u"\x00\xff\u07ff\u0800",
u"\x00\xff\u07ff\u0800\uffff",
u"\x00\xff\u07ff\u0800\uffff",
u"\x00\xff\u07ff\u0800\uffff",
u"\x00\xff\u07ff\u0800\uffff",
u"\x00\xff\u07ff\u0800\uffff\U00010000",
]
buffer = ''
result = u""
for (c, partialresult) in zip(u"\x00\xff\u07ff\u0800\uffff\U00010000".encode(encoding), check_partial):
buffer += c
res = _codecs.utf_8_decode(buffer,'strict',False)
            if res[1] > 0:
buffer = ''
result += res[0]
assert result == partialresult
def test_partial_utf16(self):
import _codecs
encoding = 'utf-16'
check_partial = [
u"", # first byte of BOM read
u"", # second byte of BOM read => byteorder known
u"",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
u"\x00\xff\u0100\uffff",
u"\x00\xff\u0100\uffff",
u"\x00\xff\u0100\uffff",
u"\x00\xff\u0100\uffff\U00010000",
]
buffer = ''
result = u""
for (c, partialresult) in zip(u"\x00\xff\u0100\uffff\U00010000".encode(encoding), check_partial):
buffer += c
            res = _codecs.utf_16_decode(buffer, 'strict', False)
            if res[1] > 0:
buffer = ''
result += res[0]
assert result == partialresult
def test_bug1098990_a(self):
import codecs, StringIO
self.encoding = 'utf-8'
s1 = u"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy\r\n"
s2 = u"offending line: ladfj askldfj klasdj fskla dfzaskdj fasklfj laskd fjasklfzzzzaa%whereisthis!!!\r\n"
s3 = u"next line.\r\n"
s = (s1+s2+s3).encode(self.encoding)
stream = StringIO.StringIO(s)
reader = codecs.getreader(self.encoding)(stream)
assert reader.readline() == s1
assert reader.readline() == s2
assert reader.readline() == s3
assert reader.readline() == u""
def test_bug1098990_b(self):
import codecs, StringIO
self.encoding = 'utf-8'
s1 = u"aaaaaaaaaaaaaaaaaaaaaaaa\r\n"
s2 = u"bbbbbbbbbbbbbbbbbbbbbbbb\r\n"
s3 = u"stillokay:bbbbxx\r\n"
s4 = u"broken!!!!badbad\r\n"
s5 = u"againokay.\r\n"
s = (s1+s2+s3+s4+s5).encode(self.encoding)
stream = StringIO.StringIO(s)
reader = codecs.getreader(self.encoding)(stream)
assert reader.readline() == s1
assert reader.readline() == s2
assert reader.readline() == s3
assert reader.readline() == s4
assert reader.readline() == s5
assert reader.readline() == u""
def test_seek_utf16le(self):
# all codecs should be able to encode these
import codecs, StringIO
encoding = 'utf-16-le'
s = u"%s\n%s\n" % (10*u"abc123", 10*u"def456")
reader = codecs.getreader(encoding)(StringIO.StringIO(s.encode(encoding)))
for t in xrange(5):
# Test that calling seek resets the internal codec state and buffers
reader.seek(0, 0)
line = reader.readline()
assert s[:len(line)] == line
def test_unicode_internal_encode(self):
import sys
class U(unicode):
pass
enc = U(u"a").encode("unicode_internal")
if sys.maxunicode == 65535: # UCS2 build
if sys.byteorder == "big":
assert enc == "\x00a"
else:
assert enc == "a\x00"
elif len(u"\U00010098") == 1:
# UCS4 build on a UCS4 CPython
enc2 = u"\U00010098".encode("unicode_internal")
if sys.byteorder == "big":
assert enc == "\x00\x00\x00a"
assert enc2 == "\x00\x01\x00\x98"
else:
assert enc == "a\x00\x00\x00"
assert enc2 == "\x98\x00\x01\x00"
else:
# UCS4 build on a UCS2 CPython
if sys.byteorder == "big":
assert enc == "\x00\x00\x00a"
else:
assert enc == "a\x00\x00\x00"
def test_unicode_internal_decode(self):
import sys, _codecs, array
if sys.maxunicode == 65535: # UCS2 build
if sys.byteorder == "big":
bytes = "\x00a"
else:
bytes = "a\x00"
else: # UCS4 build
if sys.byteorder == "big":
bytes = "\x00\x00\x00a"
bytes2 = "\x00\x01\x00\x98"
else:
bytes = "a\x00\x00\x00"
bytes2 = "\x98\x00\x01\x00"
assert bytes2.decode("unicode_internal") == u"\U00010098"
assert bytes.decode("unicode_internal") == u"a"
assert _codecs.unicode_internal_decode(array.array('c', bytes))[0] == u"a"
if '__pypy__' in sys.modules:
assert _codecs.unicode_internal_decode(memoryview(bytes))[0] == u"a"
def test_raw_unicode_escape(self):
assert unicode("\u0663", "raw-unicode-escape") == u"\u0663"
assert u"\u0663".encode("raw-unicode-escape") == "\u0663"
def test_escape_decode(self):
test = 'a\n\\b\x00c\td\u2045'.encode('string_escape')
assert test.decode('string_escape') =='a\n\\b\x00c\td\u2045'
assert '\\077'.decode('string_escape') == '?'
assert '\\100'.decode('string_escape') == '@'
assert '\\253'.decode('string_escape') == chr(0253)
assert '\\312'.decode('string_escape') == chr(0312)
def test_escape_decode_wrap_around(self):
assert '\\400'.decode('string_escape') == chr(0)
def test_escape_decode_ignore_invalid(self):
assert '\\9'.decode('string_escape') == '\\9'
assert '\\01'.decode('string_escape') == chr(01)
assert '\\0f'.decode('string_escape') == chr(0) + 'f'
assert '\\08'.decode('string_escape') == chr(0) + '8'
def test_escape_decode_errors(self):
raises(ValueError, br"\x".decode, 'string_escape')
raises(ValueError, br"[\x]".decode, 'string_escape')
raises(ValueError, br"\x0".decode, 'string_escape')
raises(ValueError, br"[\x0]".decode, 'string_escape')
def test_unicode_escape_decode_errors(self):
from _codecs import unicode_escape_decode, raw_unicode_escape_decode
for decode in [unicode_escape_decode, raw_unicode_escape_decode]:
for c, d in ('u', 4), ('U', 4):
for i in range(d):
raises(UnicodeDecodeError, decode, "\\" + c + "0"*i)
raises(UnicodeDecodeError, decode, "[\\" + c + "0"*i + "]")
data = "[\\" + c + "0"*i + "]\\" + c + "0"*i
assert decode(data, "ignore") == (u"[]", len(data))
assert decode(data, "replace") == (u"[\ufffd]\ufffd", len(data))
raises(UnicodeDecodeError, decode, r"\U00110000")
assert decode(r"\U00110000", "ignore") == (u"", 10)
assert decode(r"\U00110000", "replace") == (u"\ufffd", 10)
exc = raises(UnicodeDecodeError, unicode_escape_decode, b"\u1z32z3", 'strict')
assert str(exc.value) == r"'unicodeescape' codec can't decode bytes in position 0-2: truncated \uXXXX escape"
exc = raises(UnicodeDecodeError, raw_unicode_escape_decode, b"\u1z32z3", 'strict')
assert str(exc.value) == r"'rawunicodeescape' codec can't decode bytes in position 0-2: truncated \uXXXX"
exc = raises(UnicodeDecodeError, raw_unicode_escape_decode, b"\U1z32z3", 'strict')
assert str(exc.value) == r"'rawunicodeescape' codec can't decode bytes in position 0-2: truncated \uXXXX"
def test_escape_encode(self):
assert '"'.encode('string_escape') == '"'
assert "'".encode('string_escape') == "\\'"
def test_decode_utf8_different_case(self):
constant = u"a"
assert constant.encode("utf-8") == constant.encode("UTF-8")
def test_codec_wrong_result(self):
import _codecs
def search_function(encoding):
def f(input, errors="strict"):
return 42
if encoding == 'test.mytestenc':
return (f, f, None, None)
return None
_codecs.register(search_function)
raises(TypeError, b"hello".decode, "test.mytestenc")
raises(TypeError, u"hello".encode, "test.mytestenc")
def test_one_arg_encoder(self):
import _codecs
def search_function(encoding):
def encode_one(u):
return (b'foo', len(u))
def decode_one(u):
return (u'foo', len(u))
if encoding == 'onearg':
return (encode_one, decode_one, None, None)
return None
_codecs.register(search_function)
assert u"hello".encode("onearg") == b'foo'
assert b"hello".decode("onearg") == u'foo'
assert _codecs.encode(u"hello", "onearg") == b'foo'
assert _codecs.decode(b"hello", "onearg") == u'foo'
def test_cpytest_decode(self):
import codecs
assert codecs.decode(b'\xe4\xf6\xfc', 'latin-1') == u'\xe4\xf6\xfc'
raises(TypeError, codecs.decode)
assert codecs.decode(b'abc') == u'abc'
raises(UnicodeDecodeError, codecs.decode, b'\xff', 'ascii')
def test_bad_errorhandler_return(self):
import codecs
def baddecodereturn1(exc):
return 42
codecs.register_error("test.baddecodereturn1", baddecodereturn1)
raises(TypeError, b"\xff".decode, "ascii", "test.baddecodereturn1")
raises(TypeError, b"\\".decode, "unicode-escape", "test.baddecodereturn1")
raises(TypeError, b"\\x0".decode, "unicode-escape", "test.baddecodereturn1")
raises(TypeError, b"\\x0y".decode, "unicode-escape", "test.baddecodereturn1")
raises(TypeError, b"\\Uffffeeee".decode, "unicode-escape", "test.baddecodereturn1")
raises(TypeError, b"\\uyyyy".decode, "raw-unicode-escape", "test.baddecodereturn1")
def test_cpy_bug1175396(self):
import codecs, StringIO
s = [
'<%!--===================================================\r\n',
' BLOG index page: show recent articles,\r\n',
' today\'s articles, or articles of a specific date.\r\n',
'========================================================--%>\r\n',
'<%@inputencoding="ISO-8859-1"%>\r\n',
'<%@pagetemplate=TEMPLATE.y%>\r\n',
'<%@import=import frog.util, frog%>\r\n',
'<%@import=import frog.objects%>\r\n',
'<%@import=from frog.storageerrors import StorageError%>\r\n',
'<%\r\n',
'\r\n',
'import logging\r\n',
'log=logging.getLogger("Snakelets.logger")\r\n',
'\r\n',
'\r\n',
'user=self.SessionCtx.user\r\n',
'storageEngine=self.SessionCtx.storageEngine\r\n',
'\r\n',
'\r\n',
'def readArticlesFromDate(date, count=None):\r\n',
' entryids=storageEngine.listBlogEntries(date)\r\n',
' entryids.reverse() # descending\r\n',
' if count:\r\n',
' entryids=entryids[:count]\r\n',
' try:\r\n',
' return [ frog.objects.BlogEntry.load(storageEngine, date, Id) for Id in entryids ]\r\n',
' except StorageError,x:\r\n',
' log.error("Error loading articles: "+str(x))\r\n',
' self.abort("cannot load articles")\r\n',
]
stream = StringIO.StringIO("".join(s).encode("utf7"))
assert b"aborrt" not in stream.getvalue()
reader = codecs.getreader("utf7")(stream)
for (i, line) in enumerate(reader):
assert line == s[i]
def test_buffer_encode(self):
import _codecs, array
assert _codecs.readbuffer_encode(array.array('c', 'spam')) == ('spam', 4)
exc = raises(TypeError, _codecs.charbuffer_encode, array.array('c', 'spam'))
assert "must be string or read-only character buffer, not array.array" in str(exc.value)
assert _codecs.readbuffer_encode(u"test") == ('test', 4)
assert _codecs.charbuffer_encode(u"test") == ('test', 4)
def test_utf8sig(self):
import codecs
d = codecs.getincrementaldecoder("utf-8-sig")()
s = u"spam"
assert d.decode(s.encode("utf-8-sig")) == s
def test_decoder_state(self):
import codecs
encoding = 'utf16'
u = 'abc123'
s = u.encode(encoding)
for i in range(len(u) + 1):
d = codecs.getincrementalencoder(encoding)()
part1 = d.encode(u[:i])
state = d.getstate()
d = codecs.getincrementalencoder(encoding)()
d.setstate(state)
part2 = d.encode(u[i:], True)
assert s == part1 + part2
def test_escape_decode_escaped_newline(self):
import _codecs
s = b'\\\n'
decoded = _codecs.unicode_escape_decode(s)[0]
assert decoded == ''
def test_charmap_decode_1(self):
import codecs
assert codecs.charmap_encode(u'xxx') == (b'xxx', 3)
assert codecs.charmap_encode(u'xxx', 'strict', {ord('x'): b'XX'}) == (b'XXXXXX', 3)
res = codecs.charmap_decode(b"\x00\x01\x02", "replace", u"ab")
assert res == (u"ab\ufffd", 3)
res = codecs.charmap_decode(b"\x00\x01\x02", "replace", u"ab\ufffe")
assert res == (u'ab\ufffd', 3)
def test_decode_errors(self):
import sys
if sys.maxunicode > 0xffff:
try:
b"\x00\x00\x00\x00\x00\x11\x11\x00".decode("unicode_internal")
except UnicodeDecodeError as ex:
assert "unicode_internal" == ex.encoding
assert b"\x00\x00\x00\x00\x00\x11\x11\x00" == ex.object
assert ex.start == 4
assert ex.end == 8
else:
raise Exception("DID NOT RAISE")
def test_errors(self):
import codecs
assert codecs.replace_errors(UnicodeEncodeError(
"ascii", u"\u3042", 0, 1, "ouch")) == (u"?", 1)
assert codecs.replace_errors(UnicodeDecodeError(
"ascii", b"\xff", 0, 1, "ouch")) == (u"\ufffd", 1)
assert codecs.replace_errors(UnicodeTranslateError(
u"\u3042", 0, 1, "ouch")) == (u"\ufffd", 1)
assert codecs.replace_errors(UnicodeEncodeError(
"ascii", u"\u3042\u3042", 0, 2, "ouch")) == (u"??", 2)
assert codecs.replace_errors(UnicodeDecodeError(
"ascii", b"\xff\xff", 0, 2, "ouch")) == (u"\ufffd", 2)
assert codecs.replace_errors(UnicodeTranslateError(
u"\u3042\u3042", 0, 2, "ouch")) == (u"\ufffd\ufffd", 2)
class BadStartUnicodeEncodeError(UnicodeEncodeError):
def __init__(self):
UnicodeEncodeError.__init__(self, "ascii", u"", 0, 1, "bad")
self.start = []
# A UnicodeEncodeError object with a bad object attribute
class BadObjectUnicodeEncodeError(UnicodeEncodeError):
def __init__(self):
UnicodeEncodeError.__init__(self, "ascii", u"", 0, 1, "bad")
self.object = []
# A UnicodeDecodeError object without an end attribute
class NoEndUnicodeDecodeError(UnicodeDecodeError):
def __init__(self):
UnicodeDecodeError.__init__(self, "ascii", b"", 0, 1, "bad")
del self.end
# A UnicodeDecodeError object with a bad object attribute
class BadObjectUnicodeDecodeError(UnicodeDecodeError):
def __init__(self):
UnicodeDecodeError.__init__(self, "ascii", b"", 0, 1, "bad")
self.object = []
# A UnicodeTranslateError object without a start attribute
class NoStartUnicodeTranslateError(UnicodeTranslateError):
def __init__(self):
UnicodeTranslateError.__init__(self, u"", 0, 1, "bad")
del self.start
# A UnicodeTranslateError object without an end attribute
class NoEndUnicodeTranslateError(UnicodeTranslateError):
def __init__(self):
UnicodeTranslateError.__init__(self, u"", 0, 1, "bad")
del self.end
# A UnicodeTranslateError object without an object attribute
class NoObjectUnicodeTranslateError(UnicodeTranslateError):
def __init__(self):
UnicodeTranslateError.__init__(self, u"", 0, 1, "bad")
del self.object
import codecs
        raises(TypeError, codecs.replace_errors, BadStartUnicodeEncodeError())
        # "replace" complains about a non-exception passed in
        raises(TypeError, codecs.replace_errors, 42)
        # "replace" complains about the wrong exception type
        raises(TypeError, codecs.replace_errors, UnicodeError("ouch"))
        raises(TypeError, codecs.replace_errors, BadObjectUnicodeEncodeError())
        raises(TypeError, codecs.replace_errors, BadObjectUnicodeDecodeError())
        # with the correct exception, "replace" returns a "?" or u"\ufffd" replacement
def test_decode_ignore(self):
assert b'\xff'.decode('utf-7', 'ignore') == ''
assert b'\x00'.decode('unicode-internal', 'ignore') == ''
def test_backslashreplace(self):
import sys
sin = u"a\xac\u1234\u20ac\u8000\U0010ffff"
if sys.maxunicode > 65535:
expected_ascii = b"a\\xac\\u1234\\u20ac\\u8000\\U0010ffff"
expected_8859 = b"a\xac\\u1234\xa4\\u8000\\U0010ffff"
else:
expected_ascii = b"a\\xac\\u1234\\u20ac\\u8000\\udbff\\udfff"
expected_8859 = b"a\xac\\u1234\xa4\\u8000\\udbff\\udfff"
assert sin.encode('ascii', 'backslashreplace') == expected_ascii
assert sin.encode("iso-8859-15", "backslashreplace") == expected_8859
def test_badhandler(self):
import codecs
results = ( 42, u"foo", (1,2,3), (u"foo", 1, 3), (u"foo", None), (u"foo",), ("foo", 1, 3), ("foo", None), ("foo",) )
encs = ("ascii", "latin-1", "iso-8859-1", "iso-8859-15")
for res in results:
codecs.register_error("test.badhandler", lambda x: res)
for enc in encs:
raises(
TypeError,
u"\u3042".encode,
enc,
"test.badhandler"
)
for (enc, bytes) in (
("utf-8", b"\xff"),
("ascii", b"\xff"),
("utf-7", b"+x-"),
("unicode-internal", b"\x00"),
):
raises(
TypeError,
bytes.decode,
enc,
"test.badhandler"
)
def test_badhandler_longindex(self):
import codecs
import sys
errors = 'test.badhandler_longindex'
codecs.register_error(errors, lambda x: (u'', sys.maxsize + 1))
# CPython raises OverflowError here
raises((IndexError, OverflowError), b'apple\x92ham\x93spam'.decode, 'utf-8', errors)
def test_unicode_internal(self):
import codecs
import sys
try:
b'\x00'.decode('unicode-internal')
except UnicodeDecodeError:
pass
else:
raise Exception("DID NOT RAISE")
res = b"\x00\x00\x00\x00\x00".decode("unicode-internal", "replace")
if sys.maxunicode > 65535:
assert res == u"\u0000\ufffd" # UCS4 build
else:
assert res == u"\x00\x00\ufffd" # UCS2 build
res = "\x00\x00\x00\x00\x00".decode("unicode-internal", "ignore")
if sys.maxunicode > 65535:
assert res == u"\u0000" # UCS4 build
else:
assert res == u"\x00\x00" # UCS2 build
def handler_unicodeinternal(exc):
if not isinstance(exc, UnicodeDecodeError):
raise TypeError("don't know how to handle %r" % exc)
return (u"\x01", 5)
codecs.register_error("test.hui", handler_unicodeinternal)
res = b"\x00\x00\x00\x00\x00".decode("unicode-internal", "test.hui")
if sys.maxunicode > 65535:
assert res == u"\u0000\u0001" # UCS4 build
else:
assert res == u"\x00\x00\x01" # UCS2 build
def handler1(exc):
if not isinstance(exc, UnicodeEncodeError) \
and not isinstance(exc, UnicodeDecodeError):
raise TypeError("don't know how to handle %r" % exc)
l = [u"<%d>" % ord(exc.object[pos]) for pos in xrange(exc.start, exc.end)]
return (u"[%s]" % u"".join(l), exc.end)
codecs.register_error("test.handler1", handler1)
assert b"\\u3042\u3xxx".decode("unicode-escape", "test.handler1") == \
u"\u3042[<92><117><51>]xxx"
def test_unicode_internal_error_handler_infinite_loop(self):
import codecs
class MyException(Exception):
pass
seen = [0]
def handler_unicodeinternal(exc):
if not isinstance(exc, UnicodeDecodeError):
raise TypeError("don't know how to handle %r" % exc)
seen[0] += 1
if seen[0] == 20: # stop the 20th time this is called
raise MyException
return (u"\x01", 4) # 4 < len(input), so will try and fail again
codecs.register_error("test.inf", handler_unicodeinternal)
try:
"\x00\x00\x00\x00\x00".decode("unicode-internal", "test.inf")
except MyException:
pass
else:
raise AssertionError("should have gone into infinite loop")
def test_encode_error_bad_handler(self):
import codecs
codecs.register_error("test.bad_handler", lambda e: (repl, 1))
assert u"xyz".encode("latin-1", "test.bad_handler") == "xyz"
repl = u"\u1234"
raises(UnicodeEncodeError, u"\u5678".encode, "latin-1",
"test.bad_handler")
repl = u"\u00E9"
s = u"\u5678".encode("latin-1", "test.bad_handler")
assert s == '\xe9'
def test_charmap_encode(self):
assert 'xxx'.encode('charmap') == 'xxx'
import codecs
exc = raises(TypeError, codecs.charmap_encode, u'\xff', "replace", {0xff: 300})
assert str(exc.value) == 'character mapping must be in range(256)'
exc = raises(TypeError, codecs.charmap_encode, u'\xff', "replace", {0xff: u'a'})
assert str(exc.value) == 'character mapping must return integer, None or str'
raises(UnicodeError, codecs.charmap_encode, u"\xff", "replace", {0xff: None})
def test_charmap_encode_replace(self):
charmap = dict([ (ord(c), 2*c.upper()) for c in "abcdefgh"])
charmap[ord("?")] = "XYZ"
import codecs
sin = u"abcDEF"
sout = codecs.charmap_encode(sin, "replace", charmap)[0]
assert sout == "AABBCCXYZXYZXYZ"
def test_charmap_decode_2(self):
assert 'foo'.decode('charmap') == 'foo'
def test_charmap_build(self):
import codecs
assert codecs.charmap_build(u'123456') == {49: 0, 50: 1, 51: 2,
52: 3, 53: 4, 54: 5}
def test_utf7_start_end_in_exception(self):
try:
'+IC'.decode('utf-7')
except UnicodeDecodeError as exc:
assert exc.start == 0
assert exc.end == 3
def test_utf7_surrogate(self):
assert '+3ADYAA-'.decode('utf-7') == u'\udc00\ud800'
def test_utf7_errors(self):
import codecs
tests = [
(b'a\xffb', u'a\ufffdb'),
(b'a+IK', u'a\ufffd'),
(b'a+IK-b', u'a\ufffdb'),
(b'a+IK,b', u'a\ufffdb'),
(b'a+IKx', u'a\u20ac\ufffd'),
(b'a+IKx-b', u'a\u20ac\ufffdb'),
(b'a+IKwgr', u'a\u20ac\ufffd'),
(b'a+IKwgr-b', u'a\u20ac\ufffdb'),
(b'a+IKwgr,', u'a\u20ac\ufffd'),
(b'a+IKwgr,-b', u'a\u20ac\ufffd-b'),
(b'a+IKwgrB', u'a\u20ac\u20ac\ufffd'),
(b'a+IKwgrB-b', u'a\u20ac\u20ac\ufffdb'),
(b'a+/,+IKw-b', u'a\ufffd\u20acb'),
(b'a+//,+IKw-b', u'a\ufffd\u20acb'),
(b'a+///,+IKw-b', u'a\uffff\ufffd\u20acb'),
(b'a+////,+IKw-b', u'a\uffff\ufffd\u20acb'),
(b'a+2AE\xe1b', u'a\ufffdb'),
(b'a+2AEA-b', u'a\ufffdb'),
(b'a+2AH-b', u'a\ufffdb'),
]
for raw, expected in tests:
raises(UnicodeDecodeError, codecs.utf_7_decode, raw, 'strict', True)
assert raw.decode('utf-7', 'replace') == expected
def test_utf_16_encode_decode(self):
import codecs, sys
x = u'123abc'
if sys.byteorder == 'big':
assert codecs.getencoder('utf-16')(x) == (
b'\xfe\xff\x001\x002\x003\x00a\x00b\x00c', 6)
assert codecs.getdecoder('utf-16')(
b'\xfe\xff\x001\x002\x003\x00a\x00b\x00c') == (x, 14)
else:
assert codecs.getencoder('utf-16')(x) == (
b'\xff\xfe1\x002\x003\x00a\x00b\x00c\x00', 6)
assert codecs.getdecoder('utf-16')(
b'\xff\xfe1\x002\x003\x00a\x00b\x00c\x00') == (x, 14)
def test_unicode_escape(self):
assert u'\\'.encode('unicode-escape') == '\\\\'
assert b'\\\\'.decode('unicode-escape') == u'\\'
assert u'\ud801'.encode('unicode-escape') == '\\ud801'
assert u'\u0013'.encode('unicode-escape') == '\\x13'
def test_mbcs(self):
import sys
if sys.platform != 'win32':
return
toencode = u'caf\xe9', 'caf\xe9'
try:
# test for non-latin1 codepage, more general test needed
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
r'System\CurrentControlSet\Control\Nls\CodePage')
if _winreg.QueryValueEx(key, 'ACP')[0] == u'1255': # non-latin1
toencode = u'caf\xbf','caf\xbf'
except:
assert False, 'cannot test mbcs on this windows system, check code page'
assert u'test'.encode('mbcs') == 'test'
assert toencode[0].encode('mbcs') == toencode[1]
assert u'\u040a'.encode('mbcs') == '?' # some cyrillic letter
assert 'cafx\e9'.decode('mbcs') == u'cafx\e9'
def test_bad_handler_string_result(self):
import _codecs
def f(exc):
return ('foo', exc.end)
_codecs.register_error("test.test_codecs_not_a_string", f)
raises(TypeError, u'\u1234'.encode, 'ascii',
'test.test_codecs_not_a_string')
def test_decode_bytearray(self):
import _codecs
b = bytearray()
assert _codecs.ascii_decode(b) == (u'', 0)
assert _codecs.latin_1_decode(b) == (u'', 0)
assert _codecs.utf_7_decode(b) == (u'', 0)
assert _codecs.utf_8_decode(b) == (u'', 0)
assert _codecs.utf_16_be_decode(b) == (u'', 0)
assert _codecs.utf_16_decode(b) == (u'', 0)
assert _codecs.utf_16_le_decode(b) == (u'', 0)
assert _codecs.utf_16_ex_decode(b) == (u'', 0, 0)
assert _codecs.utf_32_decode(b) == (u'', 0)
assert _codecs.utf_32_be_decode(b) == (u'', 0)
assert _codecs.utf_32_le_decode(b) == (u'', 0)
assert _codecs.utf_32_ex_decode(b) == (u'', 0, 0)
assert _codecs.charmap_decode(b) == (u'', 0)
assert _codecs.unicode_escape_decode(b) == (u'', 0)
assert _codecs.raw_unicode_escape_decode(b) == (u'', 0)
assert _codecs.unicode_internal_decode(b) == (u'', 0)
def test_xmlcharrefreplace(self):
r = u'\u1234\u0080\u2345\u0079\u00AB'.encode('latin1', 'xmlcharrefreplace')
assert r == 'ሴ\x80⍅y\xab'
r = u'\u1234\u0080\u2345\u0079\u00AB'.encode('ascii', 'xmlcharrefreplace')
assert r == 'ሴ€⍅y«'
def test_errorhandler_collection(self):
import _codecs
errors = []
def record_error(exc):
if not isinstance(exc, UnicodeEncodeError):
raise TypeError("don't know how to handle %r" % exc)
errors.append(exc.object[exc.start:exc.end])
return (u'', exc.end)
_codecs.register_error("test.record", record_error)
sin = u"\xac\u1234\u1234\u20ac\u8000"
assert sin.encode("ascii", "test.record") == ""
assert errors == [sin]
errors = []
assert sin.encode("latin-1", "test.record") == "\xac"
assert errors == [u'\u1234\u1234\u20ac\u8000']
errors = []
assert sin.encode("iso-8859-15", "test.record") == "\xac\xa4"
assert errors == [u'\u1234\u1234', u'\u8000']
def test_last_byte_handler(self):
# issue bb-2389
import _codecs
_codecs.register_error('custom_replace', lambda exc: (u'\ufffd', exc.start+1))
for s, res in ((b"WORD\xe3\xab",
(u'WORD\ufffd\ufffd', u'WORD\ufffd')),
(b"\xef\xbb\xbfWORD\xe3\xabWORD2",
(u'\ufeffWORD\ufffd\ufffdWORD2',
u'\ufeffWORD\ufffdWORD2'))):
r = s.decode('utf8', 'replace')
assert r == res[1]
r = s.decode('utf8', 'custom_replace')
assert r == res[0]
|
"""
Attribute token definition file for Bentham Instruments Spectroradiometer
Control DLL.
"""
# -----------------------------------------------------------------------------
# Monochromator attributes
# -----------------------------------------------------------------------------
MonochromatorScanDirection = 10
MonochromatorCurrentWL = 11
MonochromatorCurrentDialReading = 12
MonochromatorParkDialReading = 13
MonochromatorCurrentGrating = 14
MonochromatorPark = 15
MonochromatorSelfPark = 16
MonochromatorModeSwitchNum = 17
MonochromatorModeSwitchState = 18
MonochromatorCanModeSwitch = 19
Gratingd = 20
GratingZ = 21
GratingA = 22
GratingWLMin = 23
GratingWLMax = 24
GratingX2 = 25
GratingX1 = 26
GratingX = 27
ChangerZ = 50
# -----------------------------------------------------------------------------
# Filter wheel attributes
# -----------------------------------------------------------------------------
FWheelFilter = 100
FWheelPositions = 101
FWheelCurrentPosition = 102
# -----------------------------------------------------------------------------
# TLS attributes
# -----------------------------------------------------------------------------
TLSCurrentPosition = 150
TLSWL = 151
TLSPOS = 152
TLSSelectWavelength = 153
TLSPositionsCommand = 154
# -----------------------------------------------------------------------------
# Switch-over box attributes
# -----------------------------------------------------------------------------
SOBInitialState = 200
SOBState = 202
# -----------------------------------------------------------------------------
# SAM attributes
# -----------------------------------------------------------------------------
SAMInitialState = 300
SAMSwitchWL = 301
SAMState = 302
SAMCurrentState = 303
# -----------------------------------------------------------------------------
# Stepper SAM attributes
# -----------------------------------------------------------------------------
SSEnergisedSteps = 320
SSRelaxedSteps = 321
SSMaxSteps = 322
SSSpeed = 323
SSMoveCurrent = 324
SSIdleCurrent = 325
# -----------------------------------------------------------------------------
# 262 attributes
# -----------------------------------------------------------------------------
biRelay = 350
biCurrentRelay = 351
# -----------------------------------------------------------------------------
# MVSS attributes
# -----------------------------------------------------------------------------
MVSSSwitchWL = 401
MVSSWidth = 402
MVSSCurrentWidth = 403
MVSSSetWidth = 404
MVSSConstantBandwidth = 405
MVSSConstantwidth = 406
MVSSSlitMode = 407
MVSSPosition = 408
# -----------------------------------------------------------------------------
# ADC attributes
# -----------------------------------------------------------------------------
ADCSamplesPerReading = 500
ADCAdaptiveIntegration = 501
ADCSamplePeriod = 502
ADCVolts = 504
# -----------------------------------------------------------------------------
# ADC CHOPPER attributes
# -----------------------------------------------------------------------------
ADCChoppedAverages = 503
# -----------------------------------------------------------------------------
# General amplifier attributes
# -----------------------------------------------------------------------------
AmpGain = 600
AmpChannel = 601
AmpMinRange = 602
AmpMaxRange = 603
AmpStartRange = 604
AmpUseSetup = 605
AmpCurrentRange = 606
AmpCurrentChannel = 607
AmpOverload = 608
AmpOverrideWl = 609
# -----------------------------------------------------------------------------
# 225 attributes
# -----------------------------------------------------------------------------
A225TargetRange = 700
A225PhaseVariable = 701
A225PhaseQuadrant = 702
A225TimeConstant = 703
A225fMode = 704
# -----------------------------------------------------------------------------
# Camera attributes
# -----------------------------------------------------------------------------
CameraIntegrationTime = 800
CameraMinWl = 801
CameraMaxWl = 802
CameraNumPixelsW = 803
CameraWidth = 804
CameraDataSize_nm = 805
CameraSAMState = 806
CameraAutoRange = 807
CameraMVSSWidth = 808
CameraAverages = 809
CameraMinITime = 810
CameraMaxITime = 811
CameraUnitMaxITime = 812
CameraZCITime = 813
CameraZCAverages = 814
CameraDataLToR = 815
# -----------------------------------------------------------------------------
# Motorised Stage attributes
# -----------------------------------------------------------------------------
MotorPosition = 900
# -----------------------------------------------------------------------------
# Miscellaneous attributes
# -----------------------------------------------------------------------------
biSettleDelay = 1000
biMin = 1001
biMax = 1002
biParkPos = 1003
biInput = 1004
biCurrentInput = 1005
biMoveWithWavelength = 1006
biHasSetupWindow = 1007
biHasAdvancedWindow = 1008
biDescriptor = 1009
biParkOffset = 1010
biProductName = 1011
# -----------------------------------------------------------------------------
# System attributes
# -----------------------------------------------------------------------------
SysStopCount = 9000
SysDarkIIntegrationTime = 9001
Sys225_277Input = 9002
# -----------------------------------------------------------------------------
# Bentham Hardware Types
# -----------------------------------------------------------------------------
BenInterface = 10000
BenSAM = 10001
BenSlit = 10002
BenFilterWheel = 10003
BenADC = 10004
BenPREAMP = 10005
BenACAMP = 10006
BenDCAMP = 10007
BenPOSTAMP = 10012
BenRelayUnit = 10008
BenMono = 10009
BenAnonDevice = 10010
BenCamera = 10020
BenDiodeArray = 10021
BenORM = 10022
BenUnknown = 10011
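# Illustrative helper (an addition, not part of the original token file):
# reverse map from numeric token to attribute name, handy when decoding
# values reported by the control DLL.
TOKEN_NAMES = {
    value: name
    for name, value in list(globals().items())
    if isinstance(value, int)
}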
|
"""
#
# 26/08/2018
# Oladotun Rominiyi - Copyright © 2018. all rights reserved.
"""
__author__ = 'dotun rominiyi'
# IMPORTS
import ujson
import ssl
import websockets
from base64 import b64decode
from zlib import decompress, MAX_WBITS
from signalr_aio.transports import Transport as SignalRTransport
from signalr_aio import Connection as SignalRConnection
from cosine.core.proc_workers import CosineProcEventWorker
from cosine.venues.base_venue import AsyncEvents
from cosine.venues.bem.types import (
BlockExMarketsAsyncOrder,
BlockExMarketsAsyncExecution,
BlockExMarketsAsyncCancelOrderResponse,
BlockExMarketsAsyncCancelAllResponse
)
# MODULE FUNCTIONS
setattr(SignalRConnection, 'last_send_id', property(lambda self: self._Connection__send_counter))
# MODULE CLASSES
class BlockExMarketsSignalRWorker(CosineProcEventWorker):
def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
super().__init__(group, target, name, args, kwargs)
self._hub = None
self._connection = None
self._invoke_handling = {}
self.events.OnPlaceOrder = CosineProcEventWorker.EventSlot()
self.events.OnExecution = CosineProcEventWorker.EventSlot()
self.events.OnCancelOrder = CosineProcEventWorker.EventSlot()
self.events.OnCancelAllOrders = CosineProcEventWorker.EventSlot()
self.events.OnLatestBids = CosineProcEventWorker.EventSlot()
self.events.OnLatestAsks = CosineProcEventWorker.EventSlot()
self.events.OnMarketTick = CosineProcEventWorker.EventSlot()
self.events.OnError = CosineProcEventWorker.EventSlot()
"""Worker process websockets setup"""
def _setup_websockets_ssl_certs(self):
cert_file = self.kwargs["CertFile"]
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.load_verify_locations(cert_file)
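        # PROTOCOL_TLS_CLIENT verifies the server certificate and hostname by
        # default; the CA file supplied above is the only trust anchor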
# monkeypatch the transport to let us connect via a custom SSLContext
async def socket(this, loop):
async with websockets.connect(
this._ws_params.socket_url,
extra_headers=this._ws_params.headers,
loop=loop,
ssl=context
) as this.ws:
this._connection.started = True
await this.handler(this.ws)
SignalRTransport.socket = socket
"""Worker process setup"""
def run(self):
# setup SSL context construction if required
if self.kwargs["CertFile"]:
self._setup_websockets_ssl_certs()
# setup SignalR connection (w/ authentication)
connection = SignalRConnection(f"{self.kwargs['APIDomain']}/signalr", session=None)
connection.qs = {'access_token': self.kwargs['access_token']}
hub = connection.register_hub('TradingHub')
self._hub = hub
self._connection = connection
# Set event handlers
hub.client.on('MarketTradesRefreshed', lambda x: None)
hub.client.on('MarketOrdersRefreshed', self.on_market_tick_received)
hub.client.on('tradeCreated', self.on_execution_received)
hub.client.on('createTradeOrderResult', self.on_place_order_received)
hub.client.on('cancelTradeOrderResult', self.on_cancel_order_received)
hub.client.on('cancelAllTradeOrdersResult', self.on_cancel_all_orders_received)
connection.received += self.on_raw_msg_received
connection.error += self.on_error_received
connection.start()
"""Worker process teardown"""
def join(self, timeout=None):
if self._connection:
self._connection.close()
"""Worker process raw message processors"""
@staticmethod
def process_compact_raw_msg(raw_msg):
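        # payload is base64-encoded, raw-DEFLATE-compressed JSON; the negative
        # wbits value tells zlib to expect no zlib header or checksum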
deflated_msg = decompress(b64decode(raw_msg), -MAX_WBITS)
return ujson.loads(deflated_msg.decode())
@staticmethod
def process_raw_msg(raw_msg):
return ujson.loads(raw_msg)
"""Worker process server invocation handling"""
def invoke(self, method, *data, callback=None):
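        # SignalR echoes an invocation id back in the 'I' field of the reply;
        # we predict the next id via the monkeypatched ``last_send_id`` and
        # park the callback under it for ``on_raw_msg_received`` to route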
inv_id = self._connection.last_send_id + 1
self._invoke_handling[inv_id] = {"cb": callback, "a": (method, data)}
self._hub.server.invoke(method, *data)
return inv_id
"""Worker process raw message received"""
async def on_raw_msg_received(self, **msg):
        if 'I' not in msg: return
inv_id = msg['I']
h = self._invoke_handling.get(inv_id)
if h:
if 'R' in msg and type(msg['R']) is not bool:
msg = BlockExMarketsSignalRWorker.process_raw_msg(msg['R'])
h["cb"](msg, h["a"])
"""Worker process error received"""
async def on_error_received(self, **msg):
self.enqueue_event(AsyncEvents.OnError, msg)
"""Worker process market tick received"""
async def on_market_tick_received(self, msg):
self.invoke("getBids", self.kwargs['APIID'], msg[0]['instrumentID'], callback=self.on_bids_received)
"""Worker process market tick received"""
async def on_bids_received(self, bids_msg, req):
(_, msg) = req
bids_msg['instrumentID'] = msg[0]['instrumentID']
self.enqueue_event('OnLatestBids', bids_msg)
self.invoke("getAsks", self.kwargs['APIID'], msg[0]['instrumentID'], callback=self.on_asks_received)
"""Worker process market tick received"""
async def on_asks_received(self, asks_msg, req):
(_, msg) = req
asks_msg['instrumentID'] = msg[0]['instrumentID']
self.enqueue_event('OnLatestAsks', asks_msg)
"""Worker process place order response received"""
async def on_place_order_received(self, msg):
self.enqueue_event(AsyncEvents.OnPlaceOrder, BlockExMarketsAsyncOrder(signalr_msg=msg))
"""Worker process place order response received"""
async def on_execution_received(self, msg):
self.enqueue_event(AsyncEvents.OnExecution, BlockExMarketsAsyncExecution(signalr_msg=msg))
"""Worker process cancel order response received"""
async def on_cancel_order_received(self, msg):
self.enqueue_event(AsyncEvents.OnCancelOrder, BlockExMarketsAsyncCancelOrderResponse(signalr_msg=msg))
"""Worker process cancel all response received"""
async def on_cancel_all_orders_received(self, msg):
self.enqueue_event(AsyncEvents.OnCancelAllOrders, BlockExMarketsAsyncCancelAllResponse(signalr_msg=msg))
|
# coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from unittest import TestCase
from tests.misc.helper import get_slave_xmss
from xrd.core.misc import logger
from xrd.core.BlockMetadata import BlockMetadata
from xrd.core.State import State
from tests.misc.helper import set_xrd_dir, get_alice_xmss
logger.initialize_default()
alice = get_alice_xmss()
slave = get_slave_xmss()
class TestBlockMetadata(TestCase):
def setUp(self):
with set_xrd_dir('no_data'):
self.state = State()
def test_put_block_metadata(self):
block_metadata = BlockMetadata.create()
block_metadata.update_last_headerhashes([b'test1', b'test2'], b'test3')
BlockMetadata.put_block_metadata(self.state, b'block_headerhash', block_metadata, None)
BlockMetadata.put_block_metadata(self.state, b'block_headerhash2', BlockMetadata.create(), None)
self.assertEqual(BlockMetadata.get_block_metadata(self.state, b'block_headerhash').to_json(),
block_metadata.to_json())
expected_json = b'{\n "blockDifficulty": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",\n ' \
b'"cumulativeDifficulty": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="\n}'
self.assertEqual(BlockMetadata.get_block_metadata(self.state, b'block_headerhash2').to_json(),
expected_json)
def test_get_block_metadata(self):
self.assertIsNone(BlockMetadata.get_block_metadata(self.state, b'test1'))
BlockMetadata.put_block_metadata(self.state, b'block_headerhash2', BlockMetadata.create(), None)
tmp_json = BlockMetadata.get_block_metadata(self.state, b'block_headerhash2').to_json()
expected_json = b'{\n "blockDifficulty": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=",\n ' \
b'"cumulativeDifficulty": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="\n}'
self.assertEqual(tmp_json, expected_json)
|
from . import opentdb as api
from random import shuffle
import json
import psycopg2
from collections import Counter
class PostgreHelp:
def __init__(self):
#self.redisClient = redis.Redis(host="127.0.0.1", port=6379)
self.user = 'user'
self.connection = psycopg2.connect(user="postgres",
password="admin",
host="127.0.0.1",
port="5432",
database="trivia")
'''
self.connection = psycopg2.connect(user="mmeqtzuigcrudl",
password="2f55bd846a95842e210cc3ac1585d2254479ccd33219fc5615b23aa1de07feb9",
host="ec2-184-72-236-57.compute-1.amazonaws.com",
port="5432",
database="dauao6n2jna0eu")
'''
def SetQuestion(self, groupname):
question = ''
answers = ''
try:
cursor = self.connection.cursor()
existingQuestion = "SELECT * FROM public.questions where groupname = '{}' ".format(
groupname)
cursor.execute(existingQuestion)
self.connection.commit()
difficulty = 'easy'
quesno = 0
exist = False
if cursor.rowcount > 0:
row = cursor.fetchone()
quesno = int(row[7])
exist = True
if quesno <= 10:
difficulty = 'easy'
elif quesno > 10 and quesno < 20:
difficulty = 'medium'
else:
difficulty = 'hard'
questionResult = api.GetQuestion(difficulty)
result = questionResult['results'][0]
question = result['question']
correct_answer = result['correct_answer']
answers = result['incorrect_answers']
answers.append(correct_answer)
shuffle(answers)
quesno += 1
if exist:
#updateUserNO = "UPDATE questions SET quesno=%s, correctanswer=%s, difficulty=%s, question=%s, answers=%s WHERE groupname= %s"
#updateUserNO = (quesno,correct_answer,difficulty,question,answers,groupname)
updateQuestionNo = "UPDATE questions SET quesno=%s, correctanswer=%s, difficulty=%s, question=%s, answers=%s WHERE groupname= %s"
ans = (quesno, correct_answer, difficulty,
question, '{}', groupname)
cursor.execute(updateQuestionNo, ans)
self.connection.commit()
else:
question_insert = "INSERT INTO public.questions(groupname, userno, correctanswer, difficulty, question,quesno) VALUES ( '{}',{},'{}','{}','{}',{})"
question_insert = question_insert.format(
groupname, quesno, correct_answer, difficulty, question, 1)
cursor.execute(question_insert)
self.connection.commit()
        except (Exception, psycopg2.Error) as error:
            if self.connection:
                print(error)
        finally:
            # closing database connection.
            if self.connection:
                cursor.close()
                self.connection.close()
                print("PostgreSQL connection is closed")
return (question, answers)
def GetQuestion(self, groupname):
cursor = self.connection.cursor()
existingQuestion = 'SELECT * FROM public.questions where groupname = %s '
        cursor.execute(existingQuestion, (groupname,))
self.connection.commit()
question = None
answers = None
if cursor.rowcount > 0:
row = cursor.fetchone()
            question = row[5]
            answers = row[6]
return (question, answers)
def GetUserCount(self, groupname, maxuser, username):
# the group already contains the user
cursor = self.connection.cursor()
existing_user = "SELECT Count(1) FROM public.answers where groupname = '{0}' ".format(
groupname)
cursor.execute(existing_user)
self.connection.commit()
row = cursor.fetchone()
totalusers = int(row[0])
        if totalusers < maxuser:
cursor = self.connection.cursor()
existing_user = "SELECT Count(1) FROM public.answers where groupname = '{0}' and username= '{1}' ".format(
groupname, username)
cursor.execute(existing_user)
self.connection.commit()
row = cursor.fetchone()
currentUser = int(row[0])
if currentUser == 0:
self.ConnectionAdd(groupname, username, '')
totalusers += 1
elif totalusers == maxuser:
totalusers += 1
return totalusers
def UpdateUserCount(self, groupname, users):
users += 1
updateUserCount = "UPDATE questions SET userno={0} WHERE groupname= '{1}'".format(
users, groupname)
cursor = self.connection.cursor()
cursor.execute(updateUserCount)
self.connection.commit()
return users
def FirstUser(self, groupname, username):
query_firstperson = "INSERT INTO public.questions(groupname, userno, correctanswer, difficulty, question,quesno) VALUES ( '{}',{},'{}','{}','{}',{})"
query_firstperson = query_firstperson.format(
groupname, 1, '', 'easy', '', 0)
cursor = self.connection.cursor()
cursor.execute(query_firstperson)
self.connection.commit()
self.ConnectionAdd(groupname, username, '')
return 1
def ConnectionAdd(self, groupname, username, ans):
ans_query = "INSERT INTO public.answers( groupname, username, ans) VALUES ('{}', '{}', '{}')"
ans_query = ans_query.format(groupname, username, ans)
cursor = self.connection.cursor()
cursor.execute(ans_query)
self.connection.commit()
def AnswerUpdate(self, groupname, username, ans):
ans_query = "Update answers set ans = '{}' where groupname='{}' and username='{}'"
ans_query = ans_query.format(ans, groupname, username)
cursor = self.connection.cursor()
cursor.execute(ans_query)
self.connection.commit()
def GetCorrectAnswer(self, groupname):
cursor = self.connection.cursor()
existingQuestion = "SELECT correctanswer FROM public.questions where groupname = '{}'".format(groupname)
cursor.execute(existingQuestion)
self.connection.commit()
answers = None
if cursor.rowcount > 0:
row = cursor.fetchone()
answers = row[0]
return answers
def GetGroupAnswer(self, groupname):
correct_answer = self.GetCorrectAnswer(groupname)
cursor = self.connection.cursor()
existingQuestion = "SELECT * FROM answers where groupname = '{}'".format(
groupname)
cursor.execute(existingQuestion)
self.connection.commit()
ans_result = []
if cursor.rowcount > 0:
rows = cursor.fetchall()
for row in rows:
answers = row[3]
ans_result.append(answers)
ans_stat = Counter(ans_result)
print(ans_stat)
return (ans_stat.items(), correct_answer)
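    # A minimal sketch (illustrative, not used above) of the same lookup with
    # psycopg2 parameter binding instead of string formatting, which avoids
    # SQL injection through ``groupname``:
    #
    #     def GetCorrectAnswerSafe(self, groupname):
    #         cursor = self.connection.cursor()
    #         cursor.execute(
    #             "SELECT correctanswer FROM public.questions WHERE groupname = %s",
    #             (groupname,),
    #         )
    #         row = cursor.fetchone()
    #         return row[0] if row else None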
|
#!/usr/bin/env python3
import codecs
import json
import os
from urllib.request import urlopen
import yaml
def load_config(directory):
_repo_config = {}
for _basedir, _, _filenames in os.walk(directory):
for _filename in _filenames:
if not _filename.endswith('.yaml'):
continue
_path = os.path.join(_basedir, _filename)
try:
with open(_path, 'r') as f:
_config = yaml.safe_load(f)
except:
print('failed to load YAML from {}'.format(_path))
raise
if 'zz_generated_metadata' not in _config:
continue
_org_repo = '{org}/{repo}'.format(**_config['zz_generated_metadata'])
if _org_repo not in _repo_config:
_repo_config[_org_repo] = {}
for _test in _config.get('tests', []):
if 'cluster_profile' not in _test.get('steps', {}):
continue
_job_name = 'pull-ci-{org}-{repo}-{branch}-{test_as}'.format(test_as=_test['as'], **_config['zz_generated_metadata'])
_test['steps']['platform'] = cluster_profile_platform(cluster_profile=_test['steps']['cluster_profile'])
_repo_config[_org_repo][_job_name] = _test['steps']
return _repo_config
def platform_stripped_workflows(repo_config):
_unstrippable = {}
_stripped = {}
for _jobs in repo_config.values():
for _job, _steps in _jobs.items():
_stripped_workflow = platform_stripped_workflow(workflow=_steps['workflow'], platform=_steps['platform'])
if not _stripped_workflow:
if _steps['workflow'] not in _unstrippable:
_unstrippable[_steps['workflow']] = {}
if _steps['platform'] not in _unstrippable[_steps['workflow']]:
_unstrippable[_steps['workflow']][_steps['platform']] = set()
_unstrippable[_steps['workflow']][_steps['platform']].add(_job)
continue
if _stripped_workflow not in _stripped:
_stripped[_stripped_workflow] = {}
_stripped[_stripped_workflow][_steps['platform']] = _steps['workflow']
if _unstrippable:
print('unable to determine platform-agnostic workflows for:')
for _workflow, _platforms in sorted(_unstrippable.items()):
print(' {}'.format(_workflow))
for _platform, _jobs in sorted(_platforms.items()):
_ellipsis = ''
if len(_jobs) > 3:
_ellipsis = ', ...'
print(' {} ({}{})'.format(_platform, ', '.join(sorted(_jobs)[:3]), _ellipsis))
return _stripped
def yield_interesting_jobs(repo_config, balanceable_workflows):
for _jobs in repo_config.values():
for _job, _steps in _jobs.items():
_stripped_workflow = platform_stripped_workflow(workflow=_steps['workflow'], platform=_steps['platform'])
if _stripped_workflow in balanceable_workflows:
yield _job
def cluster_profile_platform(cluster_profile):
"""Translate from steps.cluster_profile to workflow.as slugs."""
if cluster_profile == 'azure4':
return 'azure'
if cluster_profile == 'packet':
return 'metal'
return cluster_profile
def platform_stripped_workflow(workflow, platform):
_key = workflow.replace(platform, 'PLATFORM')
if 'PLATFORM' in _key:
return _key
return None
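# Example: platform_stripped_workflow(workflow='openshift-e2e-aws',
# platform='aws') yields 'openshift-e2e-PLATFORM'; a workflow slug that never
# mentions its platform yields None (the workflow name here is illustrative).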
def get_prow_job_counts(uri, interesting_jobs):
with urlopen(uri) as response:
_jobs = json.load(codecs.getreader('utf-8')(response))
_counts = {}
for _job in _jobs.get('items', []):
_name = _job['spec']['job']
if _name not in interesting_jobs:
continue
_counts[_name] = _counts.get(_name, 0) + 1
return _counts
def print_counts(counts, job_steps, job_org_repos, stripped_workflows, platform_specific_repositories):
print('{}\t{}\t{}\t{}\t{}'.format('count', 'platform', 'status', 'alternatives', 'job'))
for _job, _count in sorted(counts.items(), key=lambda job_count: -job_count[1]):
_steps = job_steps[_job]
_stripped_workflow = platform_stripped_workflow(workflow=_steps['workflow'], platform=_steps['platform'])
_alternative_platforms = sorted(key for key in stripped_workflows[_stripped_workflow].keys() if key != _steps['platform'])
if _steps['platform'] not in _job:
_status = 'balanceable'
elif job_org_repos[_job] in platform_specific_repositories:
continue
else:
_status = 'unknown'
print('{}\t{}\t{}\t{}\t{}'.format(_count, _steps['platform'], _status, ','.join(_alternative_platforms), _job))
if __name__ == '__main__':
_repo_config = load_config(directory=os.path.join('ci-operator', 'config', 'openshift'))
platforms = set()
_job_steps = {}
_job_org_repos = {}
for _org_repo, _jobs in _repo_config.items():
for _job, _steps in _jobs.items():
platforms.add(_steps['platform'])
_job_steps[_job] = _steps
_job_org_repos[_job] = _org_repo
_stripped_workflows = platform_stripped_workflows(repo_config=_repo_config)
_balanceable_workflows = {workflow for workflow, platforms in _stripped_workflows.items() if len(platforms) > 1}
fixed_workflows = set(_stripped_workflows.keys()) - _balanceable_workflows
if fixed_workflows:
print('workflows which need alternative platforms to support balancing:')
for _workflow in sorted(fixed_workflows):
print(' {}'.format(list(_stripped_workflows[_workflow].values())[0]))
_interesting_jobs = set(yield_interesting_jobs(repo_config=_repo_config, balanceable_workflows=_balanceable_workflows))
_counts = get_prow_job_counts(uri='https://prow.svc.ci.openshift.org/prowjobs.js', interesting_jobs=_interesting_jobs)
_platform_specific_repositories = {
'openshift/cloud-credential-operator',
'openshift/installer',
'openshift/machine-config-operator',
}
print_counts(counts=_counts, job_steps=_job_steps, job_org_repos=_job_org_repos, stripped_workflows=_stripped_workflows, platform_specific_repositories=_platform_specific_repositories)
|
from .base import CPObject, TextField, ObjectField
from .address_details import AddressDetails
class Sender(CPObject):
_name = 'sender'
_fields = {
"name": TextField("name"),
"company": TextField("company"),
"contact_phone": TextField("contact-phone"),
"address_details": ObjectField(
"address-details", format=AddressDetails
),
}
|
# Copyright 2019 Nick Guletskii
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from grundzeug.config.providers.common import DictTreeConfigurationProvider, TextParserConfigurationProviderMixin
class TOMLConfigurationProvider(
DictTreeConfigurationProvider,
TextParserConfigurationProviderMixin
):
def __init__(
self,
toml_string: str
):
import toml
super().__init__(toml.loads(toml_string))
__all__ = ["TOMLConfigurationProvider"]
|
"""Plug-in for Widgetastic browser with 3scale specific environment settings"""
from contextlib import contextmanager
from time import sleep
from urllib import parse
from widgetastic.browser import Browser, DefaultPlugin
# pylint: disable=abstract-method
class ThreescaleBrowserPlugin(DefaultPlugin):
"""
    Plug-in for :class:`ThreeScaleBrowser` which makes sure the page is loaded completely and is safe for UI interaction.
"""
ENSURE_PAGE_SAFE = '''
function jqueryInactive() {
return (typeof jQuery === "undefined") ? true : jQuery.active < 1
}
function ajaxInactive() {
return (typeof Ajax === "undefined") ? true :
Ajax.activeRequestCount < 1
}
return {
jquery: jqueryInactive(),
ajax: ajaxInactive(),
document: document.readyState == "complete",
}
'''
def ensure_page_safe(self, timeout='15s'):
"""
Ensures page is fully loaded.
        The default timeout is 10s; this raises it to 15s.
"""
super().ensure_page_safe(timeout)
def before_click(self, element, locator=None):
"""
Invoked before clicking on an element. Ensure page is fully loaded
before clicking.
"""
self.ensure_page_safe()
# pylint: disable=unnecessary-pass
def after_click(self, element, locator=None):
"""
Invoked after clicking on an element. Ensure page is fully loaded
before proceeding further.
"""
# plugin.ensure_page_safe() is invoked from browser click.
# we should not invoke it a second time, this can conflict with
# ignore_ajax=True usage from browser click
        # we need to add a sleep when using firefox after a click action
        # because geckodriver (the firefox driver) executes actions in a strange way
        # TODO: explore possibilities to check JS running or staleness of
        # clickable element in click / after_click action
if self.browser.browser_type == 'firefox':
sleep(1)
pass
class ThreeScaleBrowser(Browser):
"""Wrapper around :class:`widgetastic.browser.Browser`"""
def __init__(self, selenium, session=None, extra_objects=None):
"""Pass webdriver instance, session and other extra objects (if any).
:param selenium: :class:`selenium.WebDriver`
instance.
:param session: :class:`threescale.session.Session` instance.
:param extra_objects: any extra objects you want to include.
"""
extra_objects = extra_objects or {}
extra_objects.update({'session': session})
super().__init__(
selenium,
plugin_class=ThreescaleBrowserPlugin,
extra_objects=extra_objects)
self.window_handle = selenium.current_window_handle
def set_path(self, path):
"""Change path for the current browser.url"""
self.url = parse.urlparse(self.url)._replace(path=path).geturl()
@contextmanager
def new_tab(self, trigger, keep_tab=False):
"""
Context manager for UI operations which result in new tab (window_handle).
        Only one tab will remain open after this context manager.
        Set `keep_tab` to:
        True - if the newly opened tab should be preserved.
        False - if the newly opened tab should be closed.
        :param trigger: Method that triggers new tab opening
:param keep_tab: keep tab flag
:return: result of the `trigger` method
"""
old_handles = self.browser.window_handles
current_handle = self.browser.current_window_handle
returned_object = trigger()
new_handle = [t for t in self.browser.window_handles if t not in old_handles][0]
self.browser.switch_to_window(new_handle)
self.plugin.ensure_page_safe()
yield returned_object
        if keep_tab:
            self.browser.close_window(current_handle)
        else:
            self.browser.close_window(new_handle)
            self.browser.switch_to_window(current_handle)
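    # Illustrative usage (the locator and URL check are hypothetical):
    #
    #     with browser.new_tab(lambda: browser.element('#report').click()):
    #         assert 'report' in browser.url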
|
import matplotlib.pyplot as plt
import numpy as np
data = np.loadtxt('scoring.txt')
print(data)
blocks_remaining = data[:, 0]
score = data[:, 1]
plt.plot(score, blocks_remaining)
poly = np.poly1d(np.polyfit(blocks_remaining, score, 1))
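# poly(0) extrapolates the linear fit to zero blocks remaining, i.e. a rough
# estimate of the score for a fully cleared board (assumes the relationship
# really is linear)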
print(poly(0))
print(np.diff(score))
plt.show()
# 12562 is too low
|
from django.db import models
from ckeditor.fields import RichTextField
class Artist(models.Model):
name = models.CharField(max_length=100)
biography = RichTextField(blank=True, null=False)
def __str__(self):
return self.name
class Location(models.Model):
name = models.CharField(max_length=100)
city = models.CharField(max_length=100)
country = models.CharField(max_length=100)
web_site_url = models.URLField(blank=True)
description = RichTextField(blank=True, null=False)
def __str__(self):
return self.name
class Style(models.Model):
name = models.CharField(max_length=100)
description = RichTextField(blank=True, null=False)
def __str__(self):
return self.name
class Piece(models.Model):
title = models.CharField(max_length=100)
artist = models.ForeignKey(Artist, on_delete=models.PROTECT)
location = models.ForeignKey(Location, null=True, on_delete=models.PROTECT)
styles = models.ManyToManyField(Style, blank=True)
description = RichTextField(blank=True, null=False)
created_date = models.DateTimeField(auto_now_add=True)
wiki_url = models.URLField(blank=True)
# ImageFields require 'Pillow' to be installed
# A media url is also required in urlpatterns for serving files in development
image = models.ImageField(upload_to='art/piece_images/')
class Meta:
ordering = ['-created_date']
def __str__(self):
return self.title
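# Illustrative queries (assume migrations applied and data present):
#
#     cubist = Piece.objects.filter(styles__name="Cubism").select_related("artist")
#     for piece in cubist[:10]:
#         print(piece.title, piece.artist.name)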
|
# coding=utf8
# Copyright (C) 2004-2017 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
#
# Authors: Aric Hagberg (aric.hagberg@gmail.com)
# Pieter Swart (swart@lanl.gov)
# Sasha Gutfraind (ag362@cornell.edu)
# Vincent Gauthier (vgauthier@luxbulb.org)
"""Katz centrality."""
from math import sqrt
import networkx as nx
from networkx.utils import not_implemented_for
__all__ = ['katz_centrality', 'katz_centrality_numpy']
@not_implemented_for('multigraph')
def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6,
nstart=None, normalized=True, weight=None):
r"""Compute the Katz centrality for the nodes of the graph G.
Katz centrality computes the centrality for a node based on the centrality
of its neighbors. It is a generalization of the eigenvector centrality. The
Katz centrality for node $i$ is
.. math::
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
The parameter $\beta$ controls the initial centrality and
.. math::
\alpha < \frac{1}{\lambda_{\max}}.
Katz centrality computes the relative influence of a node within a
network by measuring the number of the immediate neighbors (first
degree nodes) and also all other nodes in the network that connect
to the node under consideration through these immediate neighbors.
Extra weight can be provided to immediate neighbors through the
parameter $\beta$. Connections made with distant neighbors
are, however, penalized by an attenuation factor $\alpha$ which
should be strictly less than the inverse largest eigenvalue of the
adjacency matrix in order for the Katz centrality to be computed
correctly. More information is provided in [1]_.
Parameters
----------
G : graph
A NetworkX graph.
alpha : float
Attenuation factor
beta : scalar or dictionary, optional (default=1.0)
Weight attributed to the immediate neighborhood. If not a scalar, the
      dictionary must have a value for every node.
max_iter : integer, optional (default=1000)
Maximum number of iterations in power method.
tol : float, optional (default=1.0e-6)
Error tolerance used to check convergence in power method iteration.
nstart : dictionary, optional
Starting value of Katz iteration for each node.
normalized : bool, optional (default=True)
If True normalize the resulting values.
weight : None or string, optional (default=None)
If None, all edge weights are considered equal.
Otherwise holds the name of the edge attribute used as weight.
Returns
-------
nodes : dictionary
Dictionary of nodes with Katz centrality as the value.
Raises
------
NetworkXError
If the parameter `beta` is not a scalar but lacks a value for at least
one node
PowerIterationFailedConvergence
If the algorithm fails to converge to the specified tolerance
within the specified number of iterations of the power iteration
method.
Examples
--------
>>> import math
>>> G = nx.path_graph(4)
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
>>> centrality = nx.katz_centrality(G, 1/phi - 0.01)
>>> for n, c in sorted(centrality.items()):
... print("%d %0.2f" % (n, c))
0 0.37
1 0.60
2 0.60
3 0.37
See Also
--------
katz_centrality_numpy
eigenvector_centrality
eigenvector_centrality_numpy
pagerank
hits
Notes
-----
Katz centrality was introduced by [2]_.
    This algorithm uses the power method to find the eigenvector
    corresponding to the largest eigenvalue of the adjacency matrix of ``G``.
    The parameter ``alpha`` should be strictly less than the inverse of the
    largest eigenvalue of the adjacency matrix for the algorithm to converge.
    You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$, the
    largest eigenvalue of the adjacency matrix.
The iteration will stop after ``max_iter`` iterations or an error tolerance of
``number_of_nodes(G) * tol`` has been reached.
When $\alpha = 1/\lambda_{\max}$ and $\beta=0$, Katz centrality is the same
as eigenvector centrality.
For directed graphs this finds "left" eigenvectors which corresponds
to the in-edges in the graph. For out-edges Katz centrality
first reverse the graph with ``G.reverse()``.
References
----------
.. [1] Mark E. J. Newman:
Networks: An Introduction.
Oxford University Press, USA, 2010, p. 720.
.. [2] Leo Katz:
A New Status Index Derived from Sociometric Index.
Psychometrika 18(1):39–43, 1953
http://phya.snu.ac.kr/~dkim/PRL87278701.pdf
"""
if len(G) == 0:
return {}
nnodes = G.number_of_nodes()
if nstart is None:
        # choose starting vector with entries of 0
        x = dict.fromkeys(G, 0)
else:
x = nstart
try:
b = dict.fromkeys(G, float(beta))
except (TypeError, ValueError, AttributeError):
b = beta
if set(beta) != set(G):
raise nx.NetworkXError('beta dictionary '
'must have a value for every node')
# make up to max_iter iterations
for i in range(max_iter):
xlast = x
x = dict.fromkeys(xlast, 0)
        # do the multiplication y^T = Alpha * x^T A + Beta
for n in x:
for nbr in G[n]:
x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
for n in x:
x[n] = alpha*x[n] + b[n]
# check convergence
err = sum([abs(x[n]-xlast[n]) for n in x])
if err < nnodes*tol:
if normalized:
# normalize vector
try:
s = 1.0/sqrt(sum(v**2 for v in x.values()))
# this should never be zero?
except ZeroDivisionError:
s = 1.0
else:
s = 1
for n in x:
x[n] *= s
return x
raise nx.PowerIterationFailedConvergence(max_iter)
@not_implemented_for('multigraph')
def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True,
weight=None):
r"""Compute the Katz centrality for the graph G.
Katz centrality computes the centrality for a node based on the centrality
of its neighbors. It is a generalization of the eigenvector centrality. The
Katz centrality for node $i$ is
.. math::
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
The parameter $\beta$ controls the initial centrality and
.. math::
\alpha < \frac{1}{\lambda_{\max}}.
Katz centrality computes the relative influence of a node within a
network by measuring the number of the immediate neighbors (first
degree nodes) and also all other nodes in the network that connect
to the node under consideration through these immediate neighbors.
Extra weight can be provided to immediate neighbors through the
parameter $\beta$. Connections made with distant neighbors
are, however, penalized by an attenuation factor $\alpha$ which
should be strictly less than the inverse largest eigenvalue of the
adjacency matrix in order for the Katz centrality to be computed
correctly. More information is provided in [1]_.
Parameters
----------
G : graph
        A NetworkX graph.
alpha : float
Attenuation factor
beta : scalar or dictionary, optional (default=1.0)
        Weight attributed to the immediate neighborhood. If not a scalar, the
        dictionary must have a value for every node.
normalized : bool
If True normalize the resulting values.
weight : None or string, optional
If None, all edge weights are considered equal.
Otherwise holds the name of the edge attribute used as weight.
Returns
-------
nodes : dictionary
Dictionary of nodes with Katz centrality as the value.
Raises
------
NetworkXError
If the parameter `beta` is not a scalar but lacks a value for at least
one node
Examples
--------
>>> import math
>>> G = nx.path_graph(4)
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
>>> centrality = nx.katz_centrality_numpy(G, 1/phi)
>>> for n, c in sorted(centrality.items()):
... print("%d %0.2f" % (n, c))
0 0.37
1 0.60
2 0.60
3 0.37
See Also
--------
katz_centrality
eigenvector_centrality_numpy
eigenvector_centrality
pagerank
hits
Notes
-----
Katz centrality was introduced by [2]_.
This algorithm uses a direct linear solver to solve the above equation.
The parameter ``alpha`` should be strictly less than the inverse of largest
eigenvalue of the adjacency matrix for there to be a solution.
You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
eigenvalue of the adjacency matrix.
When $\alpha = 1/\lambda_{\max}$ and $\beta=0$, Katz centrality is the same
as eigenvector centrality.
    For directed graphs this finds "left" eigenvectors which correspond
    to the in-edges in the graph. For out-edge Katz centrality,
    first reverse the graph with ``G.reverse()``.
References
----------
.. [1] Mark E. J. Newman:
Networks: An Introduction.
Oxford University Press, USA, 2010, p. 720.
.. [2] Leo Katz:
A New Status Index Derived from Sociometric Index.
Psychometrika 18(1):39–43, 1953
http://phya.snu.ac.kr/~dkim/PRL87278701.pdf
"""
try:
import numpy as np
except ImportError:
raise ImportError('Requires NumPy: http://scipy.org/')
if len(G) == 0:
return {}
try:
nodelist = beta.keys()
if set(nodelist) != set(G):
raise nx.NetworkXError('beta dictionary '
'must have a value for every node')
b = np.array(list(beta.values()), dtype=float)
except AttributeError:
nodelist = list(G)
try:
b = np.ones((len(nodelist), 1)) * float(beta)
except (TypeError, ValueError, AttributeError):
raise nx.NetworkXError('beta must be a number')
A = nx.adj_matrix(G, nodelist=nodelist, weight=weight).todense().T
n = A.shape[0]
centrality = np.linalg.solve(np.eye(n, n) - (alpha*A), b)
if normalized:
norm = np.sign(sum(centrality)) * np.linalg.norm(centrality)
else:
norm = 1.0
centrality = dict(zip(nodelist, map(float, centrality/norm)))
return centrality
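# Minimal sketch of the closed form solved above (illustrative, not part of
# the library; assumes an undirected graph and a networkx version providing
# nx.to_numpy_array):
#   import numpy as np
#   import networkx as nx
#   G = nx.path_graph(4)
#   A = nx.to_numpy_array(G)
#   n = len(G)
#   # x = (I - alpha A^T)^{-1} beta, with alpha=0.1 and beta=1 for every node
#   x = np.linalg.solve(np.eye(n) - 0.1 * A.T, np.ones((n, 1)))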
# fixture for nose tests
def setup_module(module):
from nose import SkipTest
try:
import numpy
import scipy
    except ImportError:
raise SkipTest("SciPy not available")
|
import enum
import rlp
from eth_utils import address, keccak
from rlp.sedes import big_endian_int, CountableList, Binary
from plasma_core.constants import NULL_SIGNATURE, NULL_ADDRESS, EMPTY_METADATA
from plasma_core.utils.eip712_struct_hash import hash_struct
from plasma_core.utils.transactions import encode_utxo_id
class TxTypes(enum.Enum):
PAYMENT = 1
class TxOutputTypes(enum.Enum):
PAYMENT = 1
class TransactionInput:
def __init__(self, blknum=0, txindex=0, oindex=0):
self.blknum = blknum
self.txindex = txindex
self.oindex = oindex
@property
def utxo_id(self):
return self.identifier.to_bytes(32, 'big')
@property
def identifier(self):
return encode_utxo_id(self.blknum, self.txindex, self.oindex)
class TransactionOutput(rlp.Serializable):
fields = (
('output_type', rlp.sedes.big_endian_int),
('output_guard', rlp.sedes.Binary.fixed_length(20)),
('token', rlp.sedes.Binary.fixed_length(20)),
('amount', big_endian_int)
)
def __init__(self,
output_guard=NULL_ADDRESS,
token=NULL_ADDRESS,
amount=0,
output_type=TxOutputTypes.PAYMENT.value):
output_guard = address.to_canonical_address(output_guard)
token = address.to_canonical_address(token)
super().__init__(output_type, output_guard, token, amount)
class Transaction(rlp.Serializable):
NUM_TXOS = 4
fields = (
('tx_type', big_endian_int),
('inputs', CountableList(Binary.fixed_length(32), NUM_TXOS)),
('outputs', CountableList(TransactionOutput, NUM_TXOS)),
('metadata', Binary.fixed_length(32))
)
def __init__(self,
tx_type=TxTypes.PAYMENT,
inputs=None,
outputs=None,
metadata=None,
signatures=None,
signers=None):
"""
:type signatures: object
"""
if inputs is None:
inputs = []
if outputs is None:
outputs = []
if metadata is None:
metadata = EMPTY_METADATA
if signatures is None:
signatures = [NULL_SIGNATURE] * len(inputs)
if signers is None:
signers = [NULL_ADDRESS] * len(inputs)
inputs = [TransactionInput(*i) for i in inputs]
outputs = [TransactionOutput(*o) for o in outputs]
super().__init__(tx_type.value, inputs, outputs, metadata)
self.signatures = signatures[:]
self._signers = signers[:]
self.spent = [False] * len(outputs)
@property
def hash(self):
return keccak(self.encoded)
@property
def signers(self):
return self._signers
@property
def encoded(self):
return rlp.encode(self)
@property
def is_deposit(self):
return all([i.blknum == 0 for i in self.inputs])
@classmethod
def serialize(cls, obj):
sedes_list = [field_sedes for field, field_sedes in cls._meta.fields]
tx_elems = [
obj.tx_type,
[i.utxo_id for i in obj.inputs],
obj.outputs,
obj.metadata
]
tx_sedes = rlp.sedes.List(sedes_list)
return tx_sedes.serialize(tx_elems)
def sign(self, index, account, verifying_contract=None):
msg_hash = hash_struct(self, verifying_contract=verifying_contract)
sig = account.key.sign_msg_hash(msg_hash)
self.signatures[index] = _amend_signature(sig.to_bytes())
self._signers[index] = sig.recover_public_key_from_msg_hash(
msg_hash).to_canonical_address() if sig != NULL_SIGNATURE else NULL_ADDRESS
def _amend_signature(sig):
""" We are making it in order to make signatures produced by eth_keys library
compatible with openzellelin ECDSA public key recovery.
Please note:
https://github.com/OpenZeppelin/openzeppelin-contracts/blob/c3f2ed81683bd0095673f525f7ee9639370a2432/contracts/cryptography/ECDSA.sol#L53
"""
sig_int = int.from_bytes(sig, 'big')
return (sig_int + 27).to_bytes(len(sig), 'big')
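# Example usage (illustrative; addresses and the amount are placeholders):
#   tx = Transaction(outputs=[(NULL_ADDRESS, NULL_ADDRESS, 100)])
#   tx.is_deposit  # True: no inputs reference a prior block
#   tx.hash        # keccak256 of the RLP-encoded transaction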
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, VHRS and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class PumpingTest(Document):
pass
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import kfp.dsl as dsl
import kfp.gcp as gcp
@dsl.pipeline(
name='Github issue summarization',
description='Demonstrate Tensor2Tensor-based training and TF-Serving'
)
def gh_summ( #pylint: disable=unused-argument
train_steps: dsl.PipelineParam = dsl.PipelineParam(name='train-steps', value=2019300),
project: dsl.PipelineParam = dsl.PipelineParam(name='project', value='YOUR_PROJECT_HERE'),
github_token: dsl.PipelineParam = dsl.PipelineParam(
name='github-token', value='YOUR_GITHUB_TOKEN_HERE'),
working_dir: dsl.PipelineParam = dsl.PipelineParam(name='working-dir', value='YOUR_GCS_DIR_HERE'),
checkpoint_dir: dsl.PipelineParam = dsl.PipelineParam(
name='checkpoint-dir',
value='gs://aju-dev-demos-codelabs/kubecon/model_output_tbase.bak2019000'),
deploy_webapp: dsl.PipelineParam = dsl.PipelineParam(name='deploy-webapp', value='true'),
data_dir: dsl.PipelineParam = dsl.PipelineParam(
name='data-dir', value='gs://aju-dev-demos-codelabs/kubecon/t2t_data_gh_all/')):
train = dsl.ContainerOp(
name='train',
image='gcr.io/google-samples/ml-pipeline-t2ttrain',
arguments=["--data-dir", data_dir,
"--checkpoint-dir", checkpoint_dir,
"--model-dir", '%s/%s/model_output' % (working_dir, '{{workflow.name}}'),
"--train-steps", train_steps, "--deploy-webapp", deploy_webapp],
file_outputs={'output': '/tmp/output'}
).apply(gcp.use_gcp_secret('user-gcp-sa'))
serve = dsl.ContainerOp(
name='serve',
image='gcr.io/google-samples/ml-pipeline-kubeflow-tfserve',
arguments=["--model_name", 'ghsumm-%s' % ('{{workflow.name}}',),
"--model_path", '%s/%s/model_output/export' % (working_dir, '{{workflow.name}}')
]
)
serve.after(train)
train.set_gpu_limit(4)
with dsl.Condition(train.output == 'true'):
webapp = dsl.ContainerOp(
name='webapp',
image='gcr.io/google-samples/ml-pipeline-webapp-launcher',
arguments=["--model_name", 'ghsumm-%s' % ('{{workflow.name}}',),
"--github_token", github_token]
)
webapp.after(serve)
if __name__ == '__main__':
import kfp.compiler as compiler
compiler.Compiler().compile(gh_summ, __file__ + '.tar.gz')
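  # One way to run the compiled pipeline is via the KFP client (illustrative;
  # host and credentials depend on your deployment):
  #   import kfp
  #   client = kfp.Client()
  #   client.upload_pipeline(__file__ + '.tar.gz', pipeline_name='gh-summ')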
|
from xd.build.core.data.namespace import *
from xd.build.core.data.expr import Expression
from xd.build.core.data.string import String
from xd.build.core.data.list import List
from xd.build.core.data.dict import Dict
from xd.build.core.data.func import Function
from xd.build.core.data.num import *
import unittest
class tests(unittest.case.TestCase):
def setUp(self):
self.ns = Namespace()
def test_set_get_1(self):
self.ns['FOO'] = 'foo'
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_set_get_2(self):
self.ns['FOO'] = String('foo')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_set2_get_1(self):
self.ns['FOO'] = 'foo'
self.ns['FOO'] = 'bar'
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_set_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['BAR'] = self.ns['FOO']
self.ns['FOO'] = 'hello world'
self.assertEqual(self.ns['FOO'].get(), 'hello world')
self.assertEqual(self.ns['BAR'].get(), 'hello world')
def test_set_get_bool(self):
self.ns['FOO'] = True
self.assertEqual(self.ns['FOO'].get(), True)
def test_set_get_int(self):
self.ns['FOO'] = 42
self.assertEqual(self.ns['FOO'].get(), 42)
def test_set_get_float(self):
self.ns['FOO'] = 3.14
self.assertEqual(self.ns['FOO'].get(), 3.14)
def test_set_bad_type(self):
self.ns['FOO'] = 'foo'
with self.assertRaises(TypeError):
self.ns['FOO'] = 42
def test_get_keyerror(self):
with self.assertRaises(KeyError):
self.ns['FOO']
def test_get_typeerror(self):
self.ns['FOO'] = String()
self.ns['I'] = 42
self.ns['FOO'] = Expression('I')
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_del(self):
self.ns['FOO'] = 'foo'
del self.ns['FOO']
with self.assertRaises(KeyError):
self.ns['FOO']
def test_eval_source_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.assertEqual(self.ns.eval('FOO+BAR'), 'foobar')
def test_eval_source_2(self):
self.ns['FOO'] = 'foo'
with self.assertRaises(NameError):
self.ns.eval('FOO+BAR')
def test_eval_expression_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
expr = Expression('FOO+BAR')
self.assertEqual(self.ns.eval(expr), 'foobar')
def test_eval_expression_2(self):
self.ns['FOO'] = 'foo'
expr = Expression('FOO+BAR')
with self.assertRaises(NameError):
self.ns.eval(expr)
def test_eval_globals(self):
self.ns['FOO'] = 'foo'
BAR = 'bar'
expr = Expression('FOO+BAR')
self.assertEqual(self.ns.eval(expr, g={'BAR': BAR}), 'foobar')
def test_append_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].append(self.ns['BAR'])
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_append_to_expr(self):
self.ns['FOO'] = 'foo'
self.ns['FOOBAR'] = String(Expression('FOO'))
self.ns['FOOBAR'].append('bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
self.assertEqual(self.ns['FOOBAR'].get(), 'foobar')
def test_append_expr(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].append(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_append_expr_none_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOO'].append(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_append_expr_none_2(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 'bar'
self.ns['FOO'].append(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_append_expr_typeerror(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 42
self.ns['FOO'].append(Expression('BAR'))
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_prepend_variable(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].prepend(self.ns['BAR'])
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_prepend_expr(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
self.ns['FOO'].prepend(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_prepend_expr_none_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOO'].prepend(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_prepend_expr_none_2(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 'bar'
self.ns['FOO'].prepend(Expression('BAR'))
self.assertEqual(self.ns['FOO'].get(), 'bar')
def test_prepend_expr_typeerror(self):
self.ns['FOO'] = String()
self.ns['BAR'] = 42
self.ns['FOO'].prepend(Expression('BAR'))
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_multibinding(self):
FOO = self.ns['FOO'] = 'foo'
with self.assertRaises(MultiBinding):
self.ns['BAR'] = self.ns['FOO']
def test_expr_as_init(self):
FOO = self.ns['FOO'] = 'foo'
self.ns['BAR'] = Expression('FOO')
self.assertEqual(self.ns['FOO'].get(), 'foo')
self.assertEqual(self.ns['BAR'].get(), 'foo')
def test_init_with_unsupported(self):
with self.assertRaises(TypeError):
self.ns['BAR'] = set()
def test_init_with_other_variable(self):
self.ns['FOO'] = 'foo'
FOO = String(self.ns['FOO'])
self.ns['BAR'] = FOO
self.ns['FOO'] = 'bar'
self.assertEqual(self.ns['FOO'].get(), 'bar')
self.assertEqual(self.ns['BAR'].get(), 'bar')
def test_str_set_if_1(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
def test_str_set_if_2(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = ''
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_3(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_4(self):
self.ns['FOOBAR'] = 'foo'
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_5(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['FOO'] = 'f'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(Expression('FOO'), 'foo')
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
def test_str_set_if_6(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['FOO'] = 'f'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(Expression('BAR'), 'bar')
self.ns['FOOBAR'].set_if(Expression('FOO'), 'foo')
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_7(self):
self.ns['FOOBAR'] = 'foo'
self.ns['BAR'] = 'b'
self.ns['FOOBAR'].set_if(self.ns['BAR'], 'bar')
self.assertEqual(self.ns['FOOBAR'].get(), 'bar')
def test_str_set_if_8(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = 'bar'
self.ns['FOO'] = 'foo'
self.ns['FOOBAR'].set_if(Expression('BAR'), Expression('FOO'))
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_9(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = 'bar'
self.ns['FOO'] = 'foo'
self.ns['FOOBAR'].set_if(Expression('BAR'), self.ns['FOO'])
self.assertEqual(self.ns['FOOBAR'].get(), 'foo')
def test_str_set_if_typeerror_1(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = True
with self.assertRaises(TypeError):
self.ns['FOOBAR'].set_if(Expression('BAR'), 42)
def test_str_set_if_typeerror_2(self):
self.ns['FOOBAR'] = 'hello world'
self.ns['BAR'] = True
self.ns['FOO'] = 42
self.ns['FOOBAR'].set_if(Expression('BAR'), Expression('FOO'))
with self.assertRaises(TypeError):
self.ns['FOOBAR'].get()
def test_str_append_if_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'b'
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_str_append_if_2(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = ''
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_append_if_3(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_append_if_4(self):
self.ns['FOO'] = 'foo'
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_append_if_5(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'b'
BAR = self.ns['BAR']
self.ns['FOO'].append_if(BAR, 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_str_append_if_6(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
BAR = self.ns['BAR']
self.ns['FOO'].append_if(BAR, 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_append_if_7(self):
self.ns['FOO'] = 'foo'
self.ns['B'] = 'b'
self.ns['BAR'] = 'bar'
BAR = self.ns['BAR']
self.ns['FOO'].append_if(Expression('B'), BAR)
self.assertEqual(self.ns['FOO'].get(), 'foobar')
def test_str_append_if_8(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
BAR = self.ns['BAR']
self.ns['FOO'].append_if(Expression('B'), BAR)
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_append_if_9(self):
self.ns['FOO'] = 'foo'
self.ns['X'] = 'x'
self.ns['Y'] = ''
self.ns['Z'] = 'z'
self.ns['FOO'].append_if(Expression('X'), 'xxx')
self.ns['FOO'].append_if(Expression('Y'), 'yyy')
self.ns['FOO'].append_if(Expression('Z'), 'zzz')
self.assertEqual(self.ns['FOO'].get(), 'fooxxxzzz')
def test_str_append_if_typeerror_1(self):
self.ns['FOO'] = 'foo'
self.ns['b'] = True
with self.assertRaises(TypeError):
self.ns['FOO'].append_if(Expression('b'), 42)
def test_str_append_if_typeerror_2(self):
self.ns['FOO'] = 'foo'
self.ns['I'] = 42
self.ns['FOO'].append_if(Expression('I'), Expression('I'))
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_str_prepend_if_1(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'b'
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_str_prepend_if_2(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = ''
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_prepend_if_3(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_prepend_if_4(self):
self.ns['FOO'] = 'foo'
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_prepend_if_5(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'b'
BAR = self.ns['BAR']
self.ns['FOO'].prepend_if(BAR, 'bar')
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_str_prepend_if_6(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = String()
BAR = self.ns['BAR']
self.ns['FOO'].prepend_if(BAR, 'bar')
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_prepend_if_7(self):
self.ns['FOO'] = 'foo'
self.ns['B'] = 'b'
self.ns['BAR'] = 'bar'
BAR = self.ns['BAR']
self.ns['FOO'].prepend_if(Expression('B'), BAR)
self.assertEqual(self.ns['FOO'].get(), 'barfoo')
def test_str_prepend_if_8(self):
self.ns['FOO'] = 'foo'
self.ns['BAR'] = 'bar'
BAR = self.ns['BAR']
self.ns['FOO'].prepend_if(Expression('B'), BAR)
self.assertEqual(self.ns['FOO'].get(), 'foo')
def test_str_prepend_if_9(self):
self.ns['FOO'] = 'foo'
self.ns['X'] = 'x'
self.ns['Y'] = ''
self.ns['Z'] = 'z'
self.ns['FOO'].prepend_if(Expression('X'), 'xxx')
self.ns['FOO'].prepend_if(Expression('Y'), 'yyy')
self.ns['FOO'].prepend_if(Expression('Z'), 'zzz')
self.assertEqual(self.ns['FOO'].get(), 'zzzxxxfoo')
def test_str_prepend_if_typeerror_1(self):
self.ns['FOO'] = 'foo'
self.ns['b'] = True
with self.assertRaises(TypeError):
self.ns['FOO'].prepend_if(Expression('b'), 42)
def test_str_prepend_if_typeerror_2(self):
self.ns['FOO'] = 'foo'
self.ns['I'] = 42
self.ns['FOO'].prepend_if(Expression('I'), Expression('I'))
with self.assertRaises(TypeError):
self.ns['FOO'].get()
def test_str_string(self):
self.ns['FOO'] = ''
self.assertEqual(str(self.ns['FOO']), 'String(FOO)')
def test_str_bool(self):
self.ns['FOO'] = True
self.assertEqual(str(self.ns['FOO']), 'Bool(FOO)')
def test_str_int(self):
self.ns['FOO'] = 42
self.assertEqual(str(self.ns['FOO']), 'Int(FOO)')
def test_str_float(self):
self.ns['FOO'] = 3.14
self.assertEqual(str(self.ns['FOO']), 'Float(FOO)')
def test_list_set_if_1(self):
self.ns['FOOBAR'] = ['foo']
self.ns['BAR'] = True
self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
self.assertEqual(self.ns['FOOBAR'].get(), ['bar'])
def test_list_set_if_2(self):
self.ns['FOOBAR'] = ['foo']
self.ns['BAR'] = False
self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
self.assertEqual(self.ns['FOOBAR'].get(), ['foo'])
def test_list_set_if_3(self):
self.ns['FOOBAR'] = ['foo']
self.ns['FOOBAR'].set_if(Expression('BAR'), ['bar'])
self.assertEqual(self.ns['FOOBAR'].get(), ['foo'])
def test_list_prepend_if_1(self):
self.ns['FOO'] = ['foo']
self.ns['BAR'] = True
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), ['bar', 'foo'])
def test_list_prepend_if_2(self):
self.ns['FOO'] = ['foo']
self.ns['BAR'] = False
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), ['foo'])
def test_list_prepend_if_3(self):
self.ns['FOO'] = ['foo']
self.ns['FOO'].prepend_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), ['foo'])
def test_list_append_if_1(self):
self.ns['FOO'] = ['foo']
self.ns['BAR'] = True
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), ['foo', 'bar'])
def test_list_append_if_2(self):
self.ns['FOO'] = ['foo']
self.ns['BAR'] = False
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), ['foo'])
def test_list_append_if_3(self):
self.ns['FOO'] = ['foo']
self.ns['FOO'].append_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['FOO'].get(), ['foo'])
def test_list_remove_1(self):
self.ns['L'] = ['foo', 'bar']
self.ns['BAR'] = 'bar'
self.ns['L'].remove(Expression('BAR'))
self.assertEqual(self.ns['L'].get(), ['foo'])
def test_list_remove_2(self):
self.ns['L'] = ['foo', 'bar']
self.ns['BAR'] = 'bar'
self.ns['L'].remove(self.ns['BAR'])
self.assertEqual(self.ns['L'].get(), ['foo'])
def test_list_remove_if_1(self):
self.ns['L'] = ['foo', 'bar']
self.ns['BAR'] = True
self.ns['L'].remove_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['L'].get(), ['foo'])
def test_list_remove_if_2(self):
self.ns['L'] = ['foo', 'bar']
self.ns['BAR'] = False
self.ns['L'].remove_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
def test_list_remove_if_3(self):
self.ns['L'] = ['foo', 'bar']
self.ns['L'].remove_if(Expression('BAR'), 'bar')
self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
def test_list_extend_if_1(self):
self.ns['L'] = ['foo', 'bar']
self.ns['BAR'] = True
self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
self.assertEqual(self.ns['L'].get(), ['foo', 'bar', 'hello', 'world'])
def test_list_extend_if_2(self):
self.ns['L'] = ['foo', 'bar']
self.ns['BAR'] = False
self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
def test_list_extend_if_3(self):
self.ns['L'] = ['foo', 'bar']
self.ns['L'].extend_if(Expression('BAR'), ['hello', 'world'])
self.assertEqual(self.ns['L'].get(), ['foo', 'bar'])
def test_list_item_invalid(self):
self.ns['l'] = []
def foo():
return 42
self.ns['f'] = Function(foo)
self.ns['l'].append(Expression('f'))
with self.assertRaises(TypeError):
self.ns['l'].get()
def test_dict_update_if_1(self):
self.ns['D'] = {'foo': 42}
self.ns['BAR'] = True
self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
self.assertEqual(self.ns['D'].get(), {'foo': 42, 'bar': 43})
def test_dict_update_if_2(self):
self.ns['D'] = {'foo': 42}
self.ns['BAR'] = False
self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
self.assertEqual(self.ns['D'].get(), {'foo': 42})
def test_dict_update_if_3(self):
self.ns['D'] = {'foo': 42}
self.ns['D'].update_if(Expression('BAR'), {'bar': 43})
self.assertEqual(self.ns['D'].get(), {'foo': 42})
def test_dict_update_if_4(self):
self.ns['D'] = {'foo': 42}
self.ns['E'] = Dict()
self.ns['BAR'] = False
self.ns['D'].update_if(Expression('BAR'), Expression('E'))
self.assertEqual(self.ns['D'].get(), {'foo': 42})
def test_dict_item_1(self):
self.ns['D'] = {}
self.ns['D']['i'] = 42
self.assertIsInstance(self.ns['D']['i'], Int)
self.assertEqual(self.ns['D']['i'].get(), 42)
def test_dict_item_2(self):
self.ns['D'] = {}
self.ns['D']['i'] = 42
self.ns['D']['i'].set_if(Expression('FOO'), 43)
self.assertIsInstance(self.ns['D']['i'], Int)
self.assertEqual(self.ns['D']['i'].get(), 42)
def test_dict_item_3(self):
self.ns['D'] = {}
self.ns['D']['i'] = 42
self.ns['D']['i'].set_if(Expression('FOO'), 43)
self.ns['FOO'] = True
self.assertIsInstance(self.ns['D']['i'], Int)
self.assertEqual(self.ns['D']['i'].get(), 43)
def test_dict_item_4(self):
self.ns['D'] = {}
self.ns['D']['i'] = [42]
self.ns['D']['i'].append_if(Expression('FOO'), 43)
self.ns['FOO'] = True
self.assertIsInstance(self.ns['D']['i'], List)
self.assertEqual(self.ns['D']['i'].get(), [42, 43])
def test_dict_item_5(self):
self.ns['D'] = {}
self.ns['D']['i'] = {'foo': 42}
self.ns['D']['i'].update_if(Expression('FOO'), {'bar': 43})
self.ns['FOO'] = True
self.assertIsInstance(self.ns['D']['i'], Dict)
self.assertEqual(self.ns['D']['i'].get(), {'foo': 42, 'bar': 43})
def test_dict_item_6(self):
self.ns['D'] = {}
self.ns['D']['i'] = {'foo': 42}
self.ns['D']['i'].update_if(Expression('FOO'), {'foo': 43})
self.ns['FOO'] = True
self.assertIsInstance(self.ns['D']['i'], Dict)
self.assertEqual(self.ns['D']['i'].get(), {'foo': 43})
def test_dict_item_implicit_expr_1(self):
self.ns['D'] = {}
self.ns['d'] = {'foo': 42}
self.ns['D']['i'] = self.ns['d']
self.ns['d']['foo'] = 43
self.assertEqual(self.ns['D'].get()['i'], {'foo': 43})
def test_dict_item_bad(self):
self.ns['D'] = {}
with self.assertRaises(TypeError):
self.ns['D']['i'] = self.ns
def test_dict_item_invalid(self):
self.ns['D'] = {}
def foo():
return 42
self.ns['f'] = Function(foo)
self.ns['D']['i'] = Expression('f')
with self.assertRaises(TypeError):
self.ns['D'].get()
def test_nested_scope_1(self):
D = Dict({'foo': Dict({'bar': 'baah'})})
D['foo'].set_if(Expression('BAR'),
{'bar': String(Expression('hello'))})
self.ns['D'] = D
self.ns['hello'] = 'booh'
self.assertEqual(self.ns['D'].get()['foo']['bar'], 'baah')
self.ns['BAR'] = True
self.assertEqual(self.ns['D'].get()['foo']['bar'], 'booh')
def test_nested_scope_2(self):
D = Dict({'foo': Dict({'bar': 42})})
D['foo'].update({'bar': 43})
self.ns['D'] = D
self.assertEqual(self.ns['D'].get()['foo']['bar'], 43)
def test_nested_scope_3(self):
D = Dict({'foo': Dict({'bar': 42})})
D['foo'].update_if(Expression('BAR'),
{'bar': Float(Expression('pi'))})
self.ns['D'] = D
self.ns['BAR'] = True
self.ns['pi'] = 3.14
self.assertEqual(self.ns['D'].get()['foo']['bar'], 3.14)
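# Illustrative use of the conditional-assignment model exercised above
# (same Namespace/Expression API as the tests):
#   ns = Namespace()
#   ns['CFLAGS'] = '-O2'
#   ns['CFLAGS'].append_if(Expression('DEBUG'), ' -g')
#   ns['DEBUG'] = 'yes'           # truthy, so the append takes effect
#   ns['CFLAGS'].get()            # '-O2 -g'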
|
from django.db.backends.base.features import BaseDatabaseFeatures
class DatabaseFeatures(BaseDatabaseFeatures):
allow_sliced_subqueries_with_in = False
can_introspect_autofield = True
can_introspect_small_integer_field = True
can_return_id_from_insert = True
can_use_chunked_reads = False
for_update_after_from = True
greatest_least_ignores_nulls = True
has_real_datatype = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_skip_locked = True
has_zoneinfo_database = False
ignores_table_name_case = True
ignores_quoted_identifier_case = True
requires_literal_defaults = True
requires_sqlparse_for_splitting = False
supports_index_on_text_field = False
supports_nullable_unique_constraints = False
supports_paramstyle_pyformat = False
supports_partially_nullable_unique_constraints = False
supports_regex_backreferencing = False
supports_sequence_reset = False
supports_subqueries_in_group_by = False
supports_tablespaces = True
supports_temporal_subtraction = True
supports_timezones = False
supports_transactions = True
uses_savepoints = True
|
import os
import argparse
# Argument Parser
parser = argparse.ArgumentParser()
# Device Information
parser.add_argument('--device', type=str, default='cuda:0', help='device cuda or cpu')
# Data information
parser.add_argument('--midi_path', type=str, default='/fast-1/mathieu/datasets/', help='path to midi folder')
parser.add_argument("--dataset", type=str, default="nottingham", help="maestro | nottingham | bach_chorales | midi_folder")
# Model Saving and reconstruction
parser.add_argument('--output_path', type=str, default='output/', help='major path for data output')
# Model Parameters
parser.add_argument("--model", type=str, default="vae", help='ae | vae | vae-flow | wae')
parser.add_argument("--beta", type=float, default=1., help='value of beta regularization')
parser.add_argument("--beta_delay", type=int, default=0, help='delay before using beta')
parser.add_argument("--encoder_type", type=str, default="gru", help='mlp | cnn | res-cnn | gru | cnn-gru | hierarchical')
# PyraPro and vae_mathieu specific parameters: dimensions of the architecture
parser.add_argument('--latent_size', type=int, default=128, help='do not touch if you do not know')
# Optimization parameters
parser.add_argument('--epochs', type=int, default=300, help='number of epochs to train')
# parser.add_argument('--n_runs', default=5, type=int, help='')
# Parse the arguments
args = parser.parse_args()
# Dataset argument
# datasets = ['nottingham', 'maestro', 'bach_chorales', 'fashion_mnist']
# Models grid arguments
model = ['ae', 'vae', 'wae']
# Types of sub-layers in the *AE architectures
encoder_type = ['mlp', 'cnn', 'res-cnn', 'gru', 'cnn-gru', 'hierarchical']
# Latent sizes
latent_size = [256, 128, 64, 32, 16, 4]
# Beta values
beta_vals = [1.0, 2.0, 5.0, 10.0]
### TODO = REALLY USE ALL GPUs (DECIDE ON A THING THAT SHOULD BE PARALLELIZED)
# Using list comprehension to compute all possible permutations
res = [[i, j, k, l] for i in model
for j in encoder_type
for k in latent_size
for l in beta_vals]
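# Equivalent grid construction with itertools, for reference:
#   import itertools
#   res = [list(combo) for combo in
#          itertools.product(model, encoder_type, latent_size, beta_vals)]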
run_name = 'run_' + str(args.device).replace(':', '_') + '.sh'
with open(run_name, 'w') as file:
#for r in range(args.n_runs):
for vals in res:
cmd_str = 'python main.py --device ' + args.device
cmd_str += ' --midi_path ' + args.midi_path
cmd_str += ' --output_path ' + args.output_path
cmd_str += ' --dataset ' + args.dataset
cmd_str += ' --model ' + vals[0]
cmd_str += ' --encoder_type ' + vals[1]
cmd_str += ' --latent_size ' + str(vals[2])
cmd_str += ' --beta ' + str(vals[3])
cmd_str += ' --epochs ' + str(args.epochs)
#cmd_str += ' --k_run ' + str(r)
print(cmd_str)
file.write(cmd_str + '\n')
os.system('chmod +x ' + run_name)
|
from sanic import Sanic
from sanic import Blueprint
from sanic.response import json
app = Sanic(__name__)
blueprint = Blueprint('name', url_prefix='/my_blueprint')
blueprint2 = Blueprint('name2', url_prefix='/my_blueprint2')
blueprint3 = Blueprint('name3', url_prefix='/my_blueprint3')
@blueprint.route('/foo')
async def foo(request):
return json({'msg': 'hi from blueprint'})
@blueprint2.route('/foo')
async def foo2(request):
return json({'msg': 'hi from blueprint2'})
@blueprint3.websocket('/foo')
async def foo3(request, ws):
while True:
data = 'hello!'
print('Sending: ' + data)
await ws.send(data)
data = await ws.recv()
print('Received: ' + data)
app.blueprint(blueprint)
app.blueprint(blueprint2)
app.blueprint(blueprint3)
app.run(host="0.0.0.0", port=8000, debug=True)
|
import addict
__all__ = ['Meter']
class _AverageMeter(object):
def __init__(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val):
self.val = val
self.sum += val
self.count += 1
self.avg = self.sum / self.count
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def __repr__(self):
return '{:.3f}'.format(self.avg)
class Meter(addict.Dict):
def update(self, metrics):
for k, v in metrics.items():
if k in self.keys():
self.__getitem__(k).update(v)
else:
meter = _AverageMeter()
meter.update(v)
self.__setitem__(k, meter)
def reset(self):
for k in self.keys():
self.__getitem__(k).reset()
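# Example usage (illustrative): accumulate running averages across updates.
#   meter = Meter()
#   meter.update({'loss': 0.9, 'acc': 0.50})
#   meter.update({'loss': 0.7, 'acc': 0.60})
#   print(meter['loss'])  # 0.800 -- the running average of the two updates
#   meter.reset()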
|
from __future__ import division
import numpy as np
import scipy.integrate
from numpy import exp, pi
class ComplexPath(object):
"""A base class for paths in the complex plane."""
def __init__(self):
self._integralCache = {}
self._trapValuesCache = {}
def __call__(self, t):
r"""
        The parameterization of the path in the variable :math:`t\in[0,1]`.
Parameters
----------
t : float
A real number :math:`0\leq t \leq 1`.
Returns
-------
complex
A point on the path in the complex plane.
"""
raise NotImplementedError('__call__ must be implemented in a subclass')
def trap_values(self, f, k, useCache=True):
"""
        Compute or retrieve (if cached) the values of the function f
at :math:`2^k+1` points along the contour which are evenly
spaced with respect to the parameterisation of the contour.
Parameters
----------
f : function
A function of a single complex variable.
k : int
Defines the number of points along the curve that f is to be
evaluated at as :math:`2^k+1`.
useCache : bool, optional
If True then use, if available, the results of any previous
calls to this function for the same f and save any new
results so that they can be reused later.
Returns
-------
:class:`numpy.ndarray`
The values of f at :math:`2^k+1` points along the contour
which are evenly spaced with respect to the parameterisation
of the contour.
"""
if f in self._trapValuesCache.keys() and useCache:
vals = self._trapValuesCache[f]
vals_k = int(np.log2(len(vals)-1))
if vals_k == k:
return vals
elif vals_k > k:
return vals[::2**(vals_k-k)]
else:
t = np.linspace(0, 1, 2**k+1)
vals = np.empty(2**k+1, dtype=np.complex128)
vals.fill(np.nan)
vals[::2**(k-vals_k)] = self._trapValuesCache[f]
vals[np.isnan(vals)] = f(self(t[np.isnan(vals)]))
# cache values
self._trapValuesCache[f] = vals
return vals
else:
t = np.linspace(0, 1, 2**k+1)
vals = f(self(t))
if useCache:
self._trapValuesCache[f] = vals
return vals
def plot(self, N=100, linecolor='C0', linestyle='-'):
"""
Uses matplotlib to plot, but not show, the path as a 2D plot in
        the complex plane.
Parameters
----------
N : int, optional
The number of points to use when plotting the path.
linecolor : optional
The colour of the plotted path, passed to the
:func:`matplotlib.pyplot.plot` function as the keyword
argument of 'color'. See the matplotlib tutorial on
`specifying colours <https://matplotlib.org/users/colors.html#>`_.
linestyle : str, optional
The line style of the plotted path, passed to the
:func:`matplotlib.pyplot.plot` function as the keyword
argument of 'linestyle'. The default corresponds to a solid
line. See :meth:`matplotlib.lines.Line2D.set_linestyle` for
other acceptable arguments.
"""
import matplotlib.pyplot as plt
t = np.linspace(0,1,N)
path = self(t)
plt.plot(path.real, path.imag, color=linecolor, linestyle=linestyle)
plt.xlabel('Re[$z$]', size=16)
plt.ylabel('Im[$z$]', size=16)
plt.gca().set_aspect(1)
# add arrow to indicate direction of path
arrow_direction = (self(0.51) - self(0.5))/abs(self(0.51) - self(0.5))
arrow_extent = 1e-6*arrow_direction
ymin, ymax = plt.gca().get_ylim()
xmin, xmax = plt.gca().get_xlim()
head_length = max(abs(ymax - ymin), abs(xmax - xmin))/40.
plt.arrow(self(0.5).real, self(0.5).imag,
arrow_extent.real, arrow_extent.imag,
head_width=head_length*2/3., head_length=head_length,
fc=linecolor, ec=linecolor)
def show(self, saveFile=None, **plotKwargs):
"""
Shows the path as a 2D plot in the complex plane. Requires
Matplotlib.
Parameters
----------
saveFile : str (optional)
If given then the plot will be saved to disk with name
'saveFile'. If saveFile=None the plot is shown on-screen.
**plotKwargs
Other key word arguments are passed to :meth:`~cxroots.Paths.ComplexPath.plot`.
"""
import matplotlib.pyplot as plt
self.plot(**plotKwargs)
if saveFile is not None:
plt.savefig(saveFile, bbox_inches='tight')
plt.close()
else:
plt.show()
def integrate(self, f, absTol=0, relTol=1e-12, divMax=15, intMethod='quad', verbose=False):
"""
Integrate the function f along the path. The value of the
integral is cached and will be reused if the method is called
with same arguments (ignoring verbose).
Parameters
----------
f : function
A function of a single complex variable.
absTol : float, optional
The absolute tolerance for the integration.
relTol : float, optional
            The relative tolerance for the integration.
divMax : int, optional
If the Romberg integration method is used then divMax is the
maximum number of divisions before the Romberg integration
routine of a path exits.
intMethod : {'quad', 'romb'}, optional
If 'quad' then :func:`scipy.integrate.quad` is used to
            compute the integral. If 'romb' then Romberg integration,
using :func:`scipy.integrate.romberg`, is used instead.
verbose : bool, optional
            Passed as the `show` argument of :func:`scipy.integrate.romberg`.
Returns
-------
complex
The integral of the function f along the path.
Notes
-----
This function is only used when checking the
multiplicity of roots. The bulk of the integration for
rootfinding is done with :func:`cxroots.CountRoots.prod`.
"""
args = (f, absTol, relTol, divMax, intMethod)
if args in self._integralCache.keys():
integral = self._integralCache[args]
elif hasattr(self, '_reversePath') and args in self._reversePath._integralCache:
# if we have already computed the reverse of this path
integral = -self._reversePath._integralCache[args]
else:
integrand = lambda t: f(self(t))*self.dzdt(t)
if intMethod == 'romb':
integral = scipy.integrate.romberg(integrand, 0, 1, tol=absTol, rtol=relTol, divmax=divMax, show=verbose)
elif intMethod == 'quad':
integrand_real = lambda t: np.real(integrand(t))
integrand_imag = lambda t: np.imag(integrand(t))
integral_real, abserr_real = scipy.integrate.quad(integrand_real, 0, 1, epsabs=absTol, epsrel=relTol)
integral_imag, abserr_imag = scipy.integrate.quad(integrand_imag, 0, 1, epsabs=absTol, epsrel=relTol)
integral = integral_real + 1j*integral_imag
else:
raise ValueError("intMethod must be either 'romb' or 'quad'")
if np.isnan(integral):
raise RuntimeError('The integral along the segment %s is NaN.\
\nThis is most likely due to a root being on or very close to the path of integration.'%self)
self._integralCache[args] = integral
return integral
class ComplexLine(ComplexPath):
r"""
A straight line :math:`z` in the complex plane from a to b
parameterised by
    .. math::
z(t) = a + (b-a)t, \quad 0\leq t \leq 1
Parameters
----------
    a : complex
    b : complex
"""
def __init__(self, a, b):
self.a, self.b = a, b
self.dzdt = lambda t: self.b-self.a
super(ComplexLine, self).__init__()
def __str__(self):
return 'ComplexLine from %.3f+%.3fi to %.3f+%.3fi' % (self.a.real, self.a.imag, self.b.real, self.b.imag)
def __call__(self, t):
r"""
The function :math:`z(t) = a + (b-a)t`.
Parameters
----------
t : float
A real number :math:`0\leq t \leq 1`.
Returns
-------
complex
A point on the line in the complex plane.
"""
return self.a + t*(self.b-self.a)
def distance(self, z):
"""
Distance from the point z to the closest point on the line.
Parameters
----------
z : complex
Returns
-------
float
The distance from z to the point on the line which is
closest to z.
"""
# convert complex numbers to vectors
A = np.array([self.a.real, self.a.imag])
B = np.array([self.b.real, self.b.imag])
Z = np.array([z.real, z.imag])
# the projection of the point z onto the line a -> b is where
# the parameter t is
t = (Z-A).dot(B-A)/abs((B-A).dot(B-A))
# but the line segment only has 0 <= t <= 1
t = t.clip(0,1)
# so the point on the line segment closest to z is
c = self(t)
return abs(c-z)
class ComplexArc(ComplexPath):
r"""
A circular arc :math:`z` with center z0, radius R, initial angle t0
and change of angle dt. The arc is parameterised by
    .. math::
        z(t) = R e^{i(t_0 + t \cdot dt)} + z_0, \quad 0\leq t \leq 1
Parameters
----------
z0 : complex
R : float
t0 : float
dt : float
"""
def __init__(self, z0, R, t0, dt):
self.z0, self.R, self.t0, self.dt = z0, R, t0, dt
self.dzdt = lambda t: 1j*self.dt*self.R*exp(1j*(self.t0 + t*self.dt))
super(ComplexArc, self).__init__()
def __str__(self):
        return 'ComplexArc: z0=%.3f%+.3fi, R=%.3f, t0=%.3f, dt=%.3f' % (self.z0.real, self.z0.imag, self.R, self.t0, self.dt)
def __call__(self, t):
r"""
The function :math:`z(t) = R e^{i(t_0 + t dt)} + z_0`.
Parameters
----------
t : float
A real number :math:`0\leq t \leq 1`.
Returns
-------
complex
A point on the arc in the complex plane.
"""
return self.R*exp(1j*(self.t0 + t*self.dt)) + self.z0
def distance(self, z):
"""
Distance from the point z to the closest point on the arc.
Parameters
----------
z : complex
Returns
-------
float
The distance from z to the point on the arc which is closest
to z.
"""
theta = np.angle(z-self.z0) # np.angle maps to (-pi,pi]
theta = (theta-self.t0)%(2*pi) + self.t0 # put theta in [t0,t0+2pi)
if ((self.dt > 0 and self.t0 < theta < self.t0+self.dt)
or (self.dt < 0 and self.t0+self.dt < theta - 2*pi < self.t0)):
# the closest point to z lies on the arc
return abs(self.R*exp(1j*theta) + self.z0 - z)
else:
# the closest point to z is one of the endpoints
return min(abs(self(0)-z), abs(self(1)-z))
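# Example usage (illustrative): integrate f(z) = z along the line 0 -> 1+1j.
# The exact value is ((1+1j)**2)/2 = 1j.
#   line = ComplexLine(0, 1 + 1j)
#   line.integrate(lambda z: z)  # approximately 1j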
|
# Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for handling bundle files."""
from magenta.music.protobuf import generator_pb2
import tensorflow.compat.v1 as tf
from google.protobuf import message
class GeneratorBundleParseError(Exception):
"""Exception thrown when a bundle file cannot be parsed."""
pass
def read_bundle_file(bundle_file):
# Read in bundle file.
bundle = generator_pb2.GeneratorBundle()
with tf.gfile.Open(bundle_file, 'rb') as f:
try:
bundle.ParseFromString(f.read())
except message.DecodeError as e:
raise GeneratorBundleParseError(e)
return bundle
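# Example usage (illustrative; the bundle path is a placeholder):
#   bundle = read_bundle_file('/path/to/model.mag')
#   # 'bundle' is a generator_pb2.GeneratorBundle protobuf message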
|
from .base import *
DEBUG = True
def get_secret(setting, secrets):
"""Get the secret variable or return explicit exception."""
try:
return secrets[setting]
except KeyError:
error_msg = "Set the {0} environment variable".format(setting)
raise ImproperlyConfigured(error_msg)
# JSON-based secrets module
secrets_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "secrets.json")
with open(secrets_path) as f:
secrets = json.loads(f.read())
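# secrets.json is expected to contain at least the following (illustrative):
#   {"SECRET_KEY": "<your-secret-key>"}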
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = get_secret('SECRET_KEY', secrets)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# Webpack Loader
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': './site/',
'STATS_FILE': os.path.join(BASE_DIR, '..', 'webpack-stats.json'),
}
}
# Development Email Server
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DEFAULT_FROM_EMAIL = 'testing@example.com'
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
EMAIL_PORT = 1025
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'yaba',
}
}
# Recaptcha Settings
# RECAPTCHA_PUBLIC_KEY = ''
# RECAPTCHA_PRIVATE_KEY = ''
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for checking quantile related ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import boosted_trees_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops.gen_boosted_trees_ops import boosted_trees_quantile_stream_resource_handle_op as resource_handle_op
from tensorflow.python.ops.gen_boosted_trees_ops import is_boosted_trees_quantile_stream_resource_initialized as resource_initialized
from tensorflow.python.platform import googletest
from tensorflow.python.training import saver
@test_util.run_deprecated_v1
class QuantileOpsTest(test_util.TensorFlowTestCase):
def create_resource(self, name, eps, max_elements, num_streams=1):
quantile_accumulator_handle = resource_handle_op(
container="", shared_name=name, name=name)
create_op = boosted_trees_ops.create_quantile_stream_resource(
quantile_accumulator_handle,
epsilon=eps,
max_elements=max_elements,
num_streams=num_streams)
is_initialized_op = resource_initialized(quantile_accumulator_handle)
resources.register_resource(quantile_accumulator_handle, create_op,
is_initialized_op)
return quantile_accumulator_handle
def setUp(self):
"""Sets up the quantile ops test as follows.
    Creates a batch of 6 examples having 2 features.
    The data looks like this:
| Instance | instance weights | Feature 0 | Feature 1
| 0 | 10 | 1.2 | 2.3
| 1 | 1 | 12.1 | 1.2
| 2 | 1 | 0.3 | 1.1
| 3 | 1 | 0.5 | 2.6
| 4 | 1 | 0.6 | 3.2
| 5 | 1 | 2.2 | 0.8
"""
self._feature_0 = constant_op.constant([1.2, 12.1, 0.3, 0.5, 0.6, 2.2],
dtype=dtypes.float32)
self._feature_1 = constant_op.constant([2.3, 1.2, 1.1, 2.6, 3.2, 0.8],
dtype=dtypes.float32)
self._feature_0_boundaries = np.array([0.3, 0.6, 1.2, 12.1])
self._feature_1_boundaries = np.array([0.8, 1.2, 2.3, 3.2])
self._feature_0_quantiles = constant_op.constant([2, 3, 0, 1, 1, 3],
dtype=dtypes.int32)
self._feature_1_quantiles = constant_op.constant([2, 1, 1, 3, 3, 0],
dtype=dtypes.int32)
self._example_weights = constant_op.constant(
[10, 1, 1, 1, 1, 1], dtype=dtypes.float32)
self.eps = 0.01
self.max_elements = 1 << 16
self.num_quantiles = constant_op.constant(3, dtype=dtypes.int64)
def testBasicQuantileBucketsSingleResource(self):
with self.cached_session() as sess:
quantile_accumulator_handle = self.create_resource("floats", self.eps,
self.max_elements, 2)
resources.initialize_resources(resources.shared_resources()).run()
summaries = boosted_trees_ops.make_quantile_summaries(
[self._feature_0, self._feature_1], self._example_weights,
epsilon=self.eps)
summary_op = boosted_trees_ops.quantile_add_summaries(
quantile_accumulator_handle, summaries)
flush_op = boosted_trees_ops.quantile_flush(
quantile_accumulator_handle, self.num_quantiles)
buckets = boosted_trees_ops.get_bucket_boundaries(
quantile_accumulator_handle, num_features=2)
quantiles = boosted_trees_ops.boosted_trees_bucketize(
[self._feature_0, self._feature_1], buckets)
self.evaluate(summary_op)
self.evaluate(flush_op)
self.assertAllClose(self._feature_0_boundaries, buckets[0].eval())
self.assertAllClose(self._feature_1_boundaries, buckets[1].eval())
self.assertAllClose(self._feature_0_quantiles, quantiles[0].eval())
self.assertAllClose(self._feature_1_quantiles, quantiles[1].eval())
def testBasicQuantileBucketsMultipleResources(self):
with self.cached_session() as sess:
quantile_accumulator_handle_0 = self.create_resource("float_0", self.eps,
self.max_elements)
quantile_accumulator_handle_1 = self.create_resource("float_1", self.eps,
self.max_elements)
resources.initialize_resources(resources.shared_resources()).run()
summaries = boosted_trees_ops.make_quantile_summaries(
[self._feature_0, self._feature_1], self._example_weights,
epsilon=self.eps)
summary_op_0 = boosted_trees_ops.quantile_add_summaries(
quantile_accumulator_handle_0,
[summaries[0]])
summary_op_1 = boosted_trees_ops.quantile_add_summaries(
quantile_accumulator_handle_1,
[summaries[1]])
flush_op_0 = boosted_trees_ops.quantile_flush(
quantile_accumulator_handle_0, self.num_quantiles)
flush_op_1 = boosted_trees_ops.quantile_flush(
quantile_accumulator_handle_1, self.num_quantiles)
bucket_0 = boosted_trees_ops.get_bucket_boundaries(
quantile_accumulator_handle_0, num_features=1)
bucket_1 = boosted_trees_ops.get_bucket_boundaries(
quantile_accumulator_handle_1, num_features=1)
quantiles = boosted_trees_ops.boosted_trees_bucketize(
[self._feature_0, self._feature_1], bucket_0 + bucket_1)
self.evaluate([summary_op_0, summary_op_1])
self.evaluate([flush_op_0, flush_op_1])
self.assertAllClose(self._feature_0_boundaries, bucket_0[0].eval())
self.assertAllClose(self._feature_1_boundaries, bucket_1[0].eval())
self.assertAllClose(self._feature_0_quantiles, quantiles[0].eval())
self.assertAllClose(self._feature_1_quantiles, quantiles[1].eval())
def testSaveRestoreAfterFlush(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.cached_session() as sess:
accumulator = boosted_trees_ops.QuantileAccumulator(
num_streams=2, num_quantiles=3, epsilon=self.eps, name="q0")
save = saver.Saver()
resources.initialize_resources(resources.shared_resources()).run()
buckets = accumulator.get_bucket_boundaries()
self.assertAllClose([], buckets[0].eval())
self.assertAllClose([], buckets[1].eval())
summaries = accumulator.add_summaries([self._feature_0, self._feature_1],
self._example_weights)
with ops.control_dependencies([summaries]):
flush = accumulator.flush()
self.evaluate(flush)
self.assertAllClose(self._feature_0_boundaries, buckets[0].eval())
self.assertAllClose(self._feature_1_boundaries, buckets[1].eval())
save.save(sess, save_path)
with self.session(graph=ops.Graph()) as sess:
accumulator = boosted_trees_ops.QuantileAccumulator(
num_streams=2, num_quantiles=3, epsilon=self.eps, name="q0")
save = saver.Saver()
save.restore(sess, save_path)
buckets = accumulator.get_bucket_boundaries()
self.assertAllClose(self._feature_0_boundaries, buckets[0].eval())
self.assertAllClose(self._feature_1_boundaries, buckets[1].eval())
def testSaveRestoreBeforeFlush(self):
save_dir = os.path.join(self.get_temp_dir(), "save_restore")
save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
with self.cached_session() as sess:
accumulator = boosted_trees_ops.QuantileAccumulator(
num_streams=2, num_quantiles=3, epsilon=self.eps, name="q0")
save = saver.Saver()
resources.initialize_resources(resources.shared_resources()).run()
summaries = accumulator.add_summaries([self._feature_0, self._feature_1],
self._example_weights)
self.evaluate(summaries)
buckets = accumulator.get_bucket_boundaries()
self.assertAllClose([], buckets[0].eval())
self.assertAllClose([], buckets[1].eval())
save.save(sess, save_path)
self.evaluate(accumulator.flush())
self.assertAllClose(self._feature_0_boundaries, buckets[0].eval())
self.assertAllClose(self._feature_1_boundaries, buckets[1].eval())
with self.session(graph=ops.Graph()) as sess:
accumulator = boosted_trees_ops.QuantileAccumulator(
num_streams=2, num_quantiles=3, epsilon=self.eps, name="q0")
save = saver.Saver()
save.restore(sess, save_path)
buckets = accumulator.get_bucket_boundaries()
self.assertAllClose([], buckets[0].eval())
self.assertAllClose([], buckets[1].eval())
if __name__ == "__main__":
googletest.main()
|
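# The snippet below appears to count (mod 10**9+9) the strings matching a
# pattern read from stdin, where 'd' marks a digit position and 'c' a letter
# position, and adjacent characters within each maximal run of equal pattern
# characters must differ: a run of length n admits rdp1[n] = 10*9**(n-1)
# digit strings and rdp2[n] = 26*25**(n-1) letter strings. Runs are assumed
# to be at most 4 characters long (the dp tables only go that far).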
mod=10**9+9
dp1,dp2=[[0]*10 for i in range(2)],[[0]*26 for i in range(2)]
rdp1,rdp2=[0]*5,[0]*5
for i in range(10): dp1[1][i]=1
for i in range(26): dp2[1][i]=1
rdp1[1],rdp2[1]=10,26
for i in range(2,5):
for j in range(10):
dp1[i&1][j]=(rdp1[i-1]-dp1[(i-1)&1][j])%mod
rdp1[i]=(rdp1[i]+dp1[i&1][j])%mod
for j in range(26):
dp2[i&1][j]=(rdp2[i-1]-dp2[(i-1)&1][j])%mod
rdp2[i]=(rdp2[i]+dp2[i&1][j])%mod
lc="x"
cnt=0
ans=1
for i in input()+"x":
if i==lc: cnt+=1
else:
if lc=="d":
ans*=rdp1[cnt]
ans%=mod
elif lc=="c":
ans*=rdp2[cnt]
ans%=mod
lc,cnt=i,1
print(ans)
|
import sqlite3
from sqlite3 import Connection
from unittest import TestCase
from unittest.mock import Mock
from uuid import uuid4
from eventsourcing.persistence import (
DatabaseError,
DataError,
InfrastructureFactory,
IntegrityError,
InterfaceError,
InternalError,
NotSupportedError,
OperationalError,
PersistenceError,
ProgrammingError,
StoredEvent,
)
from eventsourcing.sqlite import (
Factory,
SQLiteAggregateRecorder,
SQLiteApplicationRecorder,
SQLiteConnectionPool,
SQLiteDatastore,
SQLiteProcessRecorder,
SQLiteTransaction,
)
from eventsourcing.tests.base_aggregate_recorder_tests import (
AggregateRecorderTestCase,
)
from eventsourcing.tests.base_application_recorder_tests import (
ApplicationRecorderTestCase,
)
from eventsourcing.tests.base_infrastructure_tests import (
InfrastructureFactoryTestCase,
)
from eventsourcing.tests.base_process_recorder_tests import (
ProcessRecorderTestCase,
)
from eventsourcing.tests.ramdisk import tmpfile_uris
from eventsourcing.tests.test_connection_pool import TestConnectionPool
from eventsourcing.utils import Environment, get_topic
class TestTransaction(TestCase):
def setUp(self) -> None:
self.mock = Mock(Connection)
self.t = SQLiteTransaction(self.mock, commit=True)
def test_calls_commit_if_error_not_raised_during_transaction(self):
with self.t:
pass
self.mock.commit.assert_called()
self.mock.rollback.assert_not_called()
def test_calls_rollback_if_error_is_raised_during_transaction(self):
with self.assertRaises(TypeError):
with self.t:
raise TypeError
self.mock.commit.assert_not_called()
self.mock.rollback.assert_called()
def test_converts_errors_raised_in_transactions(self):
errors = [
(InterfaceError, sqlite3.InterfaceError),
(DataError, sqlite3.DataError),
(OperationalError, sqlite3.OperationalError),
(IntegrityError, sqlite3.IntegrityError),
(InternalError, sqlite3.InternalError),
(ProgrammingError, sqlite3.ProgrammingError),
(NotSupportedError, sqlite3.NotSupportedError),
(DatabaseError, sqlite3.DatabaseError),
(PersistenceError, sqlite3.Error),
]
for es_err, psy_err in errors:
with self.assertRaises(es_err):
with self.t:
raise psy_err
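# For reference, the behaviour exercised above, sketched with a real sqlite3
# connection instead of the Mock (illustrative): the transaction commits on
# clean exit when commit=True, rolls back if the body raises, and re-raises
# sqlite3 errors as the matching eventsourcing persistence errors.
#
# t = SQLiteTransaction(sqlite3.connect(":memory:"), commit=True)
# with t as cursor:
#     cursor.execute("SELECT 1")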
class SQLiteConnectionPoolTestCase(TestConnectionPool):
db_name: str
def create_pool(
self,
pool_size=1,
max_overflow=0,
max_age=None,
pre_ping=False,
mutually_exclusive_read_write=True,
):
return SQLiteConnectionPool(
db_name=self.db_name,
pool_size=pool_size,
max_overflow=max_overflow,
max_age=max_age,
pre_ping=pre_ping,
)
def test_close_on_server_after_returning_with_pre_ping(self):
pass
def test_close_on_server_after_returning_without_pre_ping(self):
pass
class TestSQLiteConnectionPoolWithInMemoryDB(SQLiteConnectionPoolTestCase):
def setUp(self) -> None:
self.db_name = ":memory:"
def test_reader_writer(self):
super()._test_reader_writer_with_mutually_exclusive_read_write()
class TestSQLiteConnectionPoolWithFileDB(SQLiteConnectionPoolTestCase):
def setUp(self) -> None:
self.tmp_urls = tmpfile_uris()
self.db_name = next(self.tmp_urls)
def test_reader_writer(self):
super()._test_reader_writer_without_mutually_exclusive_read_write()
class TestSqliteDatastore(TestCase):
def setUp(self) -> None:
self.datastore = SQLiteDatastore(":memory:")
def test_connect_failure_raises_interface_error(self):
datastore = SQLiteDatastore(None)
with self.assertRaises(InterfaceError):
with datastore.transaction(commit=False):
pass
def test_transaction(self):
transaction = self.datastore.transaction(commit=False)
with transaction as cursor:
cursor.execute("SELECT 1")
rows = cursor.fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(len(rows[0]), 1)
self.assertEqual(rows[0][0], 1)
def test_sets_wal_journal_mode_if_not_memory(self):
# Check datastore for in-memory database.
with self.datastore.transaction(commit=False):
pass
self.assertFalse(self.datastore.pool.is_journal_mode_wal)
self.assertFalse(self.datastore.pool.journal_mode_was_changed_to_wal)
# Create datastore for non-existing file database.
self.uris = tmpfile_uris()
self.db_uri = next(self.uris)
datastore = SQLiteDatastore(self.db_uri)
with datastore.transaction(commit=False):
pass
self.assertTrue(datastore.pool.is_journal_mode_wal)
self.assertTrue(datastore.pool.journal_mode_was_changed_to_wal)
datastore.close()
del datastore
# Recreate datastore for existing database.
datastore = SQLiteDatastore(self.db_uri)
with datastore.transaction(commit=False):
pass
self.assertTrue(datastore.pool.is_journal_mode_wal)
self.assertFalse(datastore.pool.journal_mode_was_changed_to_wal)
class TestSQLiteAggregateRecorder(AggregateRecorderTestCase):
def create_recorder(self):
recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
recorder.create_table()
return recorder
class TestSQLiteAggregateRecorderErrors(TestCase):
def test_raises_operational_error_when_creating_table_fails(self):
recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
# Broken create table statements.
recorder.create_table_statements = ["BLAH"]
with self.assertRaises(OperationalError):
recorder.create_table()
def test_raises_operational_error_when_inserting_fails(self):
recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
# Don't create table.
with self.assertRaises(OperationalError):
recorder.insert_events([])
def test_raises_operational_error_when_selecting_fails(self):
recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
# Don't create table.
with self.assertRaises(OperationalError):
recorder.select_events(uuid4())
class TestSQLiteApplicationRecorder(ApplicationRecorderTestCase):
def create_recorder(self):
recorder = SQLiteApplicationRecorder(
SQLiteDatastore(db_name=self.db_uri, pool_size=100)
)
recorder.create_table()
return recorder
def test_insert_select(self):
self.db_uri = ":memory:"
super().test_insert_select()
def test_concurrent_no_conflicts(self):
self.uris = tmpfile_uris()
self.db_uri = next(self.uris)
super().test_concurrent_no_conflicts()
def test_concurrent_no_conflicts_in_memory_db(self):
self.db_uri = "file::memory:?cache=shared"
super().test_concurrent_no_conflicts()
def test_concurrent_throughput(self):
self.uris = tmpfile_uris()
self.db_uri = next(self.uris)
super().test_concurrent_throughput()
def test_concurrent_throughput_in_memory_db(self):
self.db_uri = "file::memory:?cache=shared"
super().test_concurrent_throughput()
class TestSQLiteApplicationRecorderErrors(TestCase):
def test_insert_raises_operational_error_if_table_not_created(self):
recorder = SQLiteApplicationRecorder(SQLiteDatastore(":memory:"))
stored_event1 = StoredEvent(
originator_id=uuid4(),
originator_version=1,
topic="topic1",
state=b"",
)
with self.assertRaises(OperationalError):
# Haven't created table.
recorder.insert_events([stored_event1])
def test_select_raises_operational_error_if_table_not_created(self):
recorder = SQLiteApplicationRecorder(SQLiteDatastore(":memory:"))
with self.assertRaises(OperationalError):
recorder.select_events(uuid4())
with self.assertRaises(OperationalError):
recorder.select_notifications(start=1, limit=1)
with self.assertRaises(OperationalError):
recorder.max_notification_id()
class TestSQLiteProcessRecorder(ProcessRecorderTestCase):
def create_recorder(self):
recorder = SQLiteProcessRecorder(SQLiteDatastore(":memory:"))
recorder.create_table()
return recorder
class TestSQLiteProcessRecorderErrors(TestCase):
def test_insert_raises_operational_error_if_table_not_created(self):
recorder = SQLiteProcessRecorder(SQLiteDatastore(":memory:"))
stored_event1 = StoredEvent(
originator_id=uuid4(),
originator_version=1,
topic="topic1",
state=b"",
)
with self.assertRaises(OperationalError):
recorder.insert_events([stored_event1])
def test_select_raises_operational_error_if_table_not_created(self):
recorder = SQLiteProcessRecorder(SQLiteDatastore(":memory:"))
with self.assertRaises(OperationalError):
recorder.select_events(uuid4())
with self.assertRaises(OperationalError):
recorder.max_tracking_id("application name")
class TestSQLiteInfrastructureFactory(InfrastructureFactoryTestCase):
def expected_factory_class(self):
return Factory
def expected_aggregate_recorder_class(self):
return SQLiteAggregateRecorder
def expected_application_recorder_class(self):
return SQLiteApplicationRecorder
def expected_process_recorder_class(self):
return SQLiteProcessRecorder
def setUp(self) -> None:
self.env = Environment("TestCase")
self.env[InfrastructureFactory.PERSISTENCE_MODULE] = get_topic(Factory)
self.env[Factory.SQLITE_DBNAME] = ":memory:"
super().setUp()
def tearDown(self) -> None:
super().tearDown()
if Factory.SQLITE_DBNAME in self.env:
del self.env[Factory.SQLITE_DBNAME]
if Factory.SQLITE_LOCK_TIMEOUT in self.env:
del self.env[Factory.SQLITE_LOCK_TIMEOUT]
def test_construct_raises_environment_error_when_dbname_missing(self):
del self.env[Factory.SQLITE_DBNAME]
with self.assertRaises(EnvironmentError) as cm:
InfrastructureFactory.construct(self.env)
self.assertEqual(
cm.exception.args[0],
"SQLite database name not found in environment with keys: "
"TESTCASE_SQLITE_DBNAME, SQLITE_DBNAME",
)
def test_environment_error_raised_when_lock_timeout_not_an_int(self):
self.env[Factory.SQLITE_LOCK_TIMEOUT] = "abc"
with self.assertRaises(EnvironmentError) as cm:
Factory(self.env)
self.assertEqual(
cm.exception.args[0],
"SQLite environment value for key 'SQLITE_LOCK_TIMEOUT' "
"is invalid. If set, an int or empty string is expected: 'abc'",
)
def test_lock_timeout_value(self):
factory = Factory(self.env)
self.assertEqual(factory.datastore.pool.lock_timeout, None)
self.env[Factory.SQLITE_LOCK_TIMEOUT] = ""
factory = Factory(self.env)
self.assertEqual(factory.datastore.pool.lock_timeout, None)
self.env[Factory.SQLITE_LOCK_TIMEOUT] = "10"
factory = Factory(self.env)
self.assertEqual(factory.datastore.pool.lock_timeout, 10)
del AggregateRecorderTestCase
del ApplicationRecorderTestCase
del ProcessRecorderTestCase
del InfrastructureFactoryTestCase
del SQLiteConnectionPoolTestCase
|
# set a random image from unsplash as a wallpaper
import ctypes
import datetime
import requests
def getTimeStamp():
    # Timestamp used to give every downloaded wallpaper a unique, Windows-safe
    # file name (no colons). The original hour/second stamp skipped minutes,
    # which could make names collide.
    return datetime.datetime.today().strftime('%Y-%m-%d %H-%M-%S')
def getWallpaper(url):
    # Paths accepted by source.unsplash.com include 'daily', 'weekly' and
    # 'featured/', each optionally followed by '?{KEYWORD},{KEYWORD}' or a
    # resolution such as '1920x1080'.
    unsplash = r'https://source.unsplash.com/' + url
    r = requests.get(unsplash)
    r.raise_for_status()  # fail early on a bad response instead of saving junk
    timeStamp = getTimeStamp()
    savePath = r'G:\BACKUP\JOHNGM\Desktop\Games\Temp & trash\python wp\python_logo'
    fullPath = f'{savePath} {timeStamp}.bmp'
    with open(fullPath, 'wb') as f:
        f.write(r.content)
    # SPI_SETDESKWALLPAPER = 20; flags 3 = SPIF_UPDATEINIFILE | SPIF_SENDCHANGE,
    # so the new wallpaper persists and running programs are notified.
    SPI_SETDESKWALLPAPER = 20
    ctypes.windll.user32.SystemParametersInfoW(SPI_SETDESKWALLPAPER, 0, fullPath, 3)
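# Illustrative invocation (hypothetical path; source.unsplash.com accepted
# paths such as 'random' or 'random/1920x1080'):
if __name__ == '__main__':
    getWallpaper('random/1920x1080')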
|
token = '250324006:AAFDAxe4nVlgI3nFkUhVBWHf1xTo1bRwwpc'  # add your token
is_sudo = '242361127'  # add your ID
|
from collections import OrderedDict
import numpy as np
import sympy
import itertools as it
import scipy.linalg as la
from copy import deepcopy
import tinyarray as ta
from .linalg import matrix_basis, nullspace, sparse_basis, family_to_vectors, rref, allclose
from .model import Model, BlochModel, BlochCoeff, _commutative_momenta, e, I
from .groups import PointGroupElement, ContinuousGroupGenerator, generate_group
from . import kwant_continuum
def continuum_hamiltonian(symmetries, dim, total_power, all_powers=None,
momenta=_commutative_momenta, sparse_linalg=False,
prettify=False, num_digits=10):
"""Generate a family of continuum Hamiltonians that satisfy symmetry constraints.
Parameters
----------
symmetries: iterable of PointGroupElement objects.
An iterable of PointGroupElement objects, each describing a symmetry
that the family should possess.
dim: integer
The number of spatial dimensions along which the Hamiltonian family is
translationally invariant. Only the first `dim` entries in `all_powers` and
`momenta` are used.
total_power: integer or list of integers
Allowed total powers of the momentum variables in the continuum Hamiltonian.
If an integer is given, all powers below it are included as well.
all_powers: list of integer or list of list of integers
Allowed powers of the momentum variables in the continuum Hamiltonian listed
for each spatial direction. If an integer is given, all powers below it are
included as well. If a list of integers is given, only these powers are used.
momenta : iterable of strings or Sympy symbols
Names of momentum variables, default ``['k_x', 'k_y', 'k_z']`` or
corresponding sympy symbols.
    sparse_linalg : bool
        Whether to use sparse linear algebra. The sparse solver can improve
        performance for large, highly constrained families, but it is not as
        well tested as the default dense version.
prettify: boolean, default False
Whether to make the basis pretty by rounding and basis change. For details
see docstring of `make_basis_pretty`. May be numerically unstable.
num_digits: integer, default 10
Number of significant digits to which the basis is rounded using prettify.
This argument is ignored if prettify = False.
Returns
-------
family: list
A list of Model objects representing the family that
satisfies the symmetries specified. Each Model object satisfies
all the symmetries by construction.
"""
if type(total_power) is int:
max_power = total_power
total_power = range(max_power + 1)
# Generate a Hamiltonian family
N = list(symmetries)[0].U.shape[0] # Dimension of Hamiltonian matrix
momenta = momenta[:dim]
# Symmetries do not mix the total degree of momenta. We can thus work separately at each
# fixed degree.
family = []
for degree in total_power:
# Make all momentum variables given the constraints on dimensions and degrees in the family
momentum_variables = continuum_variables(dim, degree, all_powers=all_powers, momenta=momenta)
degree_family = [Model({momentum_variable: matrix}, momenta=momenta)
for momentum_variable, matrix
in it.product(momentum_variables, matrix_basis(N))]
degree_family = constrain_family(symmetries, degree_family, sparse_linalg=sparse_linalg)
if prettify:
family_size = len(degree_family)
degree_family = make_basis_pretty(degree_family, num_digits=num_digits)
assert family_size == len(degree_family), 'make_basis_pretty reduced the size of the family, \
possibly due to numerical instability'
family += degree_family
return family
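# Illustrative usage sketch (example values, not part of the library): the
# 2x2 continuum Hamiltonians in two dimensions, up to quadratic order in
# momentum, constrained by spinless time reversal (complex conjugation).
#
# import numpy as np
# time_reversal = PointGroupElement(np.eye(2), conjugate=True,
#                                   antisymmetry=False, U=np.eye(2))
# family = continuum_hamiltonian([time_reversal], dim=2, total_power=2)
# display_family(family)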
def continuum_pairing(symmetries, dim, total_power, all_powers=None, momenta=_commutative_momenta,
phases=None, ph_square=1, sparse_linalg=False,
prettify=False, num_digits=10):
"""Generate a family of continuum superconducting pairing functions that satisfy
symmetry constraints.
The specified symmetry operators should act on the normal state Hamiltonian, not
in particle-hole space.
Parameters
----------
symmetries: iterable of PointGroupElement objects.
An iterable of PointGroupElement objects, each describing a symmetry
that the family should possess.
dim: integer
The number of spatial dimensions along which the Hamiltonian family is
translationally invariant. Only the first `dim` entries in `all_powers` and
`momenta` are used.
total_power: integer or list of integers
Allowed total powers of the momentum variables in the continuum Hamiltonian.
If an integer is given, all powers below it are included as well.
all_powers: list of integer or list of list of integers
Allowed powers of the momentum variables in the continuum Hamiltonian listed
for each spatial direction. If an integer is given, all powers below it are
included as well. If a list of integers is given, only these powers are used.
momenta: list of int or list of Sympy objects
Indices of momenta from ['k_x', 'k_y', 'k_z'] or a list of names for the
momentum variables. Default is ['k_x', 'k_y', 'k_z'].
phases: iterable of numbers
Phase factors to multiply the hole block of the symmetry operators in
particle-hole space. By default, all phase factors are 1.
ph_square: integer, either 1 or -1.
Specifies whether the particle-hole operator squares to +1 or -1.
    sparse_linalg : bool
        Whether to use sparse linear algebra. The sparse solver can improve
        performance for large, highly constrained families, but it is not as
        well tested as the default dense version.
prettify: boolean, default False
Whether to make the basis pretty by rounding and basis change. For details
see docstring of `make_basis_pretty`. May be numerically unstable.
num_digits: integer, default 10
Number of significant digits to which the basis is rounded using prettify.
This argument is ignored if prettify = False.
Returns
-------
family: list
A list of Model objects representing the family that
satisfies the symmetries specified. Each Model object satisfies
all the symmetries by construction.
"""
if type(total_power) is int:
max_power = total_power
total_power = range(max_power + 1)
    if ph_square not in (-1, 1):
raise ValueError('Particle-hole operator must square to +1 or -1.')
if phases is None:
phases = np.ones(len(symmetries))
symmetries = deepcopy(symmetries)
N = symmetries[0].U.shape[0]
# Attach phases to symmetry operators and extend to BdG space
for sym, phase in zip(symmetries, phases):
if isinstance(sym, PointGroupElement):
sym.U = la.block_diag(sym.U, phase*sym.U.conj())
if isinstance(sym, ContinuousGroupGenerator):
sym.U = la.block_diag(sym.U, -sym.U.conj() + phase*np.eye(N))
# Build ph operator
ph = np.array([[0, 1], [ph_square, 0]])
ph = PointGroupElement(np.eye(dim), True, True, np.kron(ph, np.eye(N)))
symmetries.append(ph)
    momenta = momenta[:dim]
# matrix basis for all matrices in off-diagonal blocks
b0 = np.zeros((N, N))
mbasis = [np.block([[b0, m], [m.T.conj(), b0]]) for m in matrix_basis(N)]
mbasis.extend([np.block([[b0, 1j*m], [-1j*m.T.conj(), b0]]) for m in matrix_basis(N)])
# Symmetries do not mix the total degree of momenta. We can thus work separately at each
# fixed degree.
family = []
for degree in total_power:
# Make all momentum variables given the constraints on dimensions and degrees in the family
momentum_variables = continuum_variables(dim, degree, all_powers=all_powers, momenta=momenta)
degree_family = [Model({momentum_variable: matrix}, momenta=momenta)
for momentum_variable, matrix
in it.product(momentum_variables, mbasis)]
degree_family = constrain_family(symmetries, degree_family, sparse_linalg=sparse_linalg)
if prettify:
family_size = len(degree_family)
degree_family = make_basis_pretty(degree_family, num_digits=num_digits)
assert family_size == len(degree_family), 'make_basis_pretty reduced the size of the family, \
possibly due to numerical instability'
family += degree_family
# Cast the pairing terms into new Model objects, to ensure that each object has the correct
# shape.
family = [Model({term: matrix[:N, N:] for term, matrix in monomial.items()}) for monomial in
family]
return [mon for mon in family if len(mon)]
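# Illustrative usage sketch (example values): superconducting pairing blocks
# up to linear order in momentum for a 2x2 normal-state model with spinless
# time reversal; the particle-hole operator is constructed internally.
#
# import numpy as np
# time_reversal = PointGroupElement(np.eye(2), conjugate=True,
#                                   antisymmetry=False, U=np.eye(2))
# pairings = continuum_pairing([time_reversal], dim=2, total_power=1)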
def continuum_variables(dim, total_power, all_powers=None, momenta=_commutative_momenta):
"""Make a list of all linearly independent combinations of momentum
variables with given total power.
Parameters
    ----------
dim: integer
The number of spatial dimensions along which the Hamiltonian family is
translationally invariant. Only the first `dim` entries in `all_powers` and
`momenta` are used.
total_power: integer
Allowed total power of the momentum variables in the continuum Hamiltonian.
all_powers: list of integer or list of list of integers
Allowed powers of the momentum variables in the continuum Hamiltonian listed
for each spatial direction. If an integer is given, all powers below it are
included as well. If a list of integers is given, only these powers are used.
momenta : list of int or list of Sympy objects
Indices of momenta from ['k_x', 'k_y', 'k_z'] or a list of names for the
momentum variables. Default is ['k_x', 'k_y', 'k_z'].
Returns
    -------
A list of Sympy objects, representing the momentum variables that enter the Hamiltonian.
"""
if all_powers is None:
all_powers = [total_power] * dim
if len(all_powers) < dim or len(momenta) < dim:
raise ValueError('`all_powers` and `momenta` must be at least `dim` long.')
# Only keep the first dim entries
momenta = momenta[:dim]
all_powers = all_powers[:dim]
for i, power in enumerate(all_powers):
if type(power) is int:
all_powers[i] = range(power + 1)
if dim == 0:
return [kwant_continuum.sympify(1)]
if all([type(i) is int for i in momenta]):
momenta = [_commutative_momenta[i] for i in momenta]
else:
momenta = [kwant_continuum.make_commutative(k, k)
for k in momenta]
# Generate powers for all terms
powers = [p for p in it.product(*all_powers) if sum(p) == total_power]
momentum_variables = [sympy.Mul(*[sympy.Pow(k, power) for k, power in zip(momenta, p)])
for p in powers]
return momentum_variables
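# For instance (illustrative): continuum_variables(2, 2) returns the three
# degree-2 monomials in two dimensions, [k_y**2, k_x*k_y, k_x**2].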
def round_family(family, num_digits=3):
"""Round the matrix coefficients of a family to specified significant digits.
Parameters
-----------
family: iterable of Model objects that represents
a family.
num_digits: integer
        Number of significant digits to which the matrix coefficients are rounded.
Returns
----------
A list of Model objects representing the family, with
the matrix coefficients rounded to num_digits significant digits.
"""
return [member.around(num_digits) for member in family]
def hamiltonian_from_family(family, coeffs=None, nsimplify=True, tosympy=True):
"""Form a Hamiltonian from a Hamiltonian family by taking a linear combination
of its elements.
Parameters
----------
family: iterable of Model or BlochModel objects
List of terms in the Hamiltonian family.
coeffs: list of sympy objects, optional
Coefficients used to form the linear combination of
terms in the family. Element n of coeffs multiplies
member n of family. The default choice of the coefficients
is c_n.
nsimplify: bool
Whether to use sympy.nsimplify on the output or not, which
attempts to replace floating point numbers with simpler expressions,
e.g. fractions.
tosympy: bool
Whether to convert the Hamiltonian to a sympy expression.
If False, a Model or BlochModel object is returned instead,
depending on the type of the Hamiltonian family.
Returns
-------
ham: sympy.Matrix or Model/BlochModel object.
The Hamiltonian, i.e. the linear combination of entries in family.
"""
if coeffs is None:
coeffs = list(sympy.symbols('c0:%d'%len(family)))
else:
assert len(coeffs) == len(family), 'Length of family and coeffs do not match.'
# The order of multiplication is important here, so that __mul__ of 'term'
# gets used. 'c' is a sympy symbol, which multiplies 'term' incorrectly.
ham = sum(term * c for c, term in zip(coeffs, family))
if tosympy:
return ham.tosympy(nsimplify=nsimplify)
else:
return ham
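# For example (illustrative): for a two-member family the default coefficients
# give hamiltonian_from_family(family) == c0 * family[0] + c1 * family[1],
# converted to a sympy matrix (or left as a Model/BlochModel if tosympy=False).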
def display_family(family, summed=False, coeffs=None, nsimplify=True):
"""Display a Hamiltonian family in a Jupyter notebook
If this function is used from a Jupyter notebook then it uses the notebook's
rich LaTeX display features. If used from a console or script, then this
function just uses :func:`print`.
Parameters
-----------
family: iterable of Model or BlochModel objects
List of terms in a Hamiltonian family.
summed: boolean
Whether to display the Hamiltonian family by individual member (False),
or as a sum over all members with expansion coefficients (True).
coeffs: list of sympy objects, optional
Coefficients used when combining terms in the family if summed is True.
nsimplify: boolean
Whether to use sympy.nsimplify on the output or not, which attempts to replace
floating point numbers with simpler expressions, e.g. fractions.
"""
try:
from IPython.display import display
except ImportError:
display = print
if not summed:
# print each member in the family separately
for term in family:
sterm = term.tosympy(nsimplify=nsimplify)
display(sterm)
else:
# sum the family members multiplied by expansion coefficients
display(hamiltonian_from_family(family, coeffs=coeffs,
nsimplify=nsimplify))
def check_symmetry(family, symmetries, num_digits=None):
"""Check that a family satisfies symmetries. A symmetry is satisfied if all members of
the family satisfy it.
    If the input family was rounded beforehand, it is necessary to
    specify the number of significant digits using num_digits, otherwise
this check might fail.
Parameters
----------
family: iterable of Model or BlochModel objects representing
a family.
symmetries: iterable representing the symmetries to check.
If the family is a Hamiltonian family, symmetries is an iterable
of PointGroupElement objects representing the symmetries
to check.
num_digits: integer
In the case that the input family has been rounded, num_digits
should be the number of significant digits to which the family
was rounded.
"""
for symmetry in symmetries:
# Iterate over all members of the family
for member in family:
if isinstance(symmetry, PointGroupElement):
if num_digits is None:
assert symmetry.apply(member) == member
else:
assert symmetry.apply(member).around(num_digits) == member.around(num_digits)
elif isinstance(symmetry, ContinuousGroupGenerator):
                # A continuous symmetry is satisfied if applying its generator returns zero
assert symmetry.apply(member) == {}
def constrain_family(symmetries, family, sparse_linalg=False):
"""Apply symmetry constraints to a family.
Parameters
-----------
symmetries: iterable of PointGroupElement objects, representing the symmetries
that are used to constrain the Hamiltonian family.
family: iterable of Model or BlochModel objects, representing the Hamiltonian
family to which the symmetry constraints are applied.
    sparse_linalg : bool
        Whether to use sparse linear algebra. The sparse solver can improve
        performance for large, highly constrained families, but it is not as
        well tested as the default dense version.
Returns
----------
family: iterable of Model or BlochModel objects, that represents the
family with the symmetry constraints applied. """
if not family:
return family
# Fix ordering
family = list(family)
symmetries = list(symmetries)
# Check compatibility of family members and symmetries
shape = family[0].shape
momenta = family[0].momenta
for term in family:
assert term.shape == shape
assert term.momenta == momenta
for symmetry in symmetries:
assert symmetry.U.shape == shape
if symmetry.R is not None:
assert symmetry.R.shape[0] == len(momenta)
# Need all the linearly independent variables before and after
# rotation to make the matrix of linear constraints.
rotated_families = [[symmetry.apply(monomial) for monomial in family]
for symmetry in symmetries]
# Get all variables and fix ordering
all_variables = set()
for member in it.chain(*rotated_families):
all_variables |= member.keys()
all_variables = list(all_variables)
# Generate the matrix of symmetry constraints.
constraint_matrices = []
for i, symmetry in enumerate(symmetries):
# In block space, each row is the constraint that the matrix coefficient to a linearly independent
# monomial must vanish. The column index runs over expansion coefficients multiplying different
# models.
rotated_family = rotated_families[i]
constraint_matrix = family_to_vectors(rotated_family, all_keys=all_variables).T
if isinstance(symmetry, PointGroupElement):
# Only need to subtract untransformed part for discrete symmetries,
# continuous symmetry applies the infinitesimal generator and
# the result should vanish.
constraint_matrix -= family_to_vectors(family, all_keys=all_variables).T
constraint_matrices.append(constraint_matrix)
constraint_matrix = np.vstack(constraint_matrices)
# If it is empty, there are no constraints
if not np.any(constraint_matrix):
return family
# ROWS of this matrix are the basis vectors
null_basis = nullspace(constraint_matrix, sparse=sparse_linalg)
# We return a list of dictionary-like Model objects.
# Each Model object represents one term in the Hamiltonian family,
# where keys are the variables and values the matrix coefficients multiplying each variable.
Hamiltonian_family = []
for basis_vector in null_basis:
family_member = sum([val * family[i] for i, val in enumerate(basis_vector)])
# Eliminate entries that vanish
if family_member:
Hamiltonian_family.append(family_member)
return Hamiltonian_family
def make_basis_pretty(family, num_digits=2):
"""Attempt to make a family more legible by reducing the
number of nonzero entries in the matrix coefficients.
Parameters
-----------
family: iterable of Model or BlochModel objects representing
a family.
num_digits: positive integer
Number of significant digits that matrix coefficients are rounded to.
This attempts to make the family more legible by prettifying a matrix,
which is done by bringing it to reduced row-echelon form. This
procedure may be numerically unstable, so this function should be used
with caution. """
# Return empty family for empty family
if not family:
return family
# convert family to a set of row vectors
basis_vectors = family_to_vectors(family)
# Find the transformation that brings it to rref form
_, S = rref(basis_vectors, return_S=True, rtol=10**(-num_digits))
# Transform the model by S
rfamily = []
for vec in S:
term = sum([val * family[i] for i, val in enumerate(vec)])
# Eliminate entries that vanish
if term:
rfamily.append(term)
return round_family(rfamily, num_digits)
def remove_duplicates(family, tol=1e-8):
"""Remove linearly dependent terms in Hamiltonian family using SVD.
Parameters
-----------
family: iterable of Model or BlochModel objects representing
a family.
tol: float
tolerance used in SVD when finding the span.
Returns
-------
rfamily: list of Model or BlochModel objects representing
the family with only linearly independent terms.
"""
if not family:
return family
# Convert to vectors
basis_vectors = family_to_vectors(family)
# Find the linearly independent vectors
_, basis_vectors = nullspace(basis_vectors.T, atol=tol, return_complement=True)
rfamily = []
for vec in basis_vectors:
rfamily.append(sum([family[i] * c for i, c in enumerate(vec)]))
return rfamily
def subtract_family(family1, family2, tol=1e-8, prettify=False):
"""Remove the linear span of family2 from the span of family1 using SVD.
family2 must be a span of terms that are either inside the span of family1
or orthogonal to it. This guarantees that projecting out family2 from family1
results in a subfamily of family1.
Parameters
-----------
family1, family2: iterable of Model or BlochModel objects
Hamiltonian families.
tol: float
        tolerance used in SVD when finding the span.
    prettify: bool
        Whether to prettify the result using `make_basis_pretty`.
Returns
-------
rfamily: list of Model or BlochModel objects representing
family1 with the span of family2 removed.
"""
if not family1 or not family2:
return family1
# Convert to vectors
all_keys = set.union(*[set(term.keys()) for term in family1])
all_keys |= set.union(*[set(term.keys()) for term in family2])
all_keys = list(all_keys)
basis1 = family_to_vectors(family1, all_keys=all_keys)
basis2 = family_to_vectors(family2, all_keys=all_keys)
# get the orthonormal basis for the span of basis2
_, basis2 = nullspace(basis2, atol=tol, return_complement=True)
# project out components in the span of basis2 from basis1
projected_basis1 = basis1 - (basis1.dot(basis2.T.conj())).dot(basis2)
# Check that projected_basis1 is a subspace of basis1.
_, ort_basis1 = nullspace(basis1, atol=tol, return_complement=True)
if not allclose((projected_basis1.dot(ort_basis1.T.conj())).dot(ort_basis1), projected_basis1, atol=tol):
raise ValueError('Projecting onto the complement of family2 did not result in a subspace of family1')
# Find the coefficients of linearly independent vectors
_, projected_coeffs1 = nullspace(projected_basis1.T, atol=tol, return_complement=True)
rfamily = []
for vec in projected_coeffs1:
rfamily.append(sum([family1[i] * c for i, c in enumerate(vec)]))
if prettify:
rfamily = make_basis_pretty(rfamily, num_digits=int(-np.log10(tol)))
return rfamily
def symmetrize_monomial(monomial, symmetries):
"""Symmetrize monomial by averaging over all symmetry images under symmetries.
Parameters
----------
monomial : Model or BlochModel object
Hamiltonian term to be symmetrized
symmetries : iterable of PointGroupElement objects
Symmetries to use for symmetrization. `symmetries` must form a closed group.
Returns
-------
Model or BlochModel object
Symmetrized term.
"""
return sum([sym.apply(monomial) for sym in symmetries]) * (1/len(symmetries))
def bloch_family(hopping_vectors, symmetries, norbs, onsites=True,
momenta=_commutative_momenta,
symmetrize=True, prettify=True, num_digits=10,
bloch_model=False):
"""Generate a family of symmetric Bloch-Hamiltonians.
Parameters
----------
hopping_vectors : list of tuples (a, b, vec)
`a` and `b` are identifiers for the different sites (e.g. strings) of
        the unit cell, `vec` is the real space hopping vector. `vec` may
        contain integers, sympy symbols, or floating point numbers.
symmetries : list of PointGroupElement or ContinuousGroupGenerator
Generators of the symmetry group. ContinuousGroupGenerators can only
have onsite action as a lattice system cannot have continuous rotation
invariance. It is assumed that the block structure of the unitary action
is consistent with norbs, as returned by `symmetry_from_permutation`.
norbs : OrderedDict : {site : norbs_site} or tuple of tuples ((site, norbs_site), )
        Sites appear in the order specified, with blocks of size norbs_site
corresponding to each site.
onsites : bool, default True
Whether to include on-site terms consistent with the symmetry.
momenta : iterable of strings or Sympy symbols
Names of momentum variables, default ``['k_x', 'k_y', 'k_z']`` or
corresponding sympy symbols.
symmetrize : bool, default True
Whether to use the symmetrization strategy. This does not require
a full set of hoppings to start, all symmetry related hoppings
are generated. Otherwise the constraining strategy is used, this does
not generate any new hoppings beyond the ones specified and constrains
coefficients to enforce symmetry.
prettify: bool
Whether to prettify the result. This step may be numerically unstable.
num_digits: int, default 10
Number of significant digits kept when prettifying.
bloch_model: bool, default False
Determines the return format of this function. If set to False, returns
a list of Model objects. If True, returns a list of BlochModel objects.
BlochModel objects are more suitable than Model objects if the hopping
vectors include floating point numbers.
Returns
-------
family: list of Model or BlochModel objects
A list of Model or BlochModel objects representing the family that
satisfies the symmetries specified. Each object satisfies
all the symmetries by construction.
    Notes
    -----
There is no need to specify the translation vectors, all necessary information
is encoded in the symmetries and hopping vectors.
In the generic case the Bloch-Hamiltonian produced is not Brillouin-zone periodic,
instead it acquires a unitary transformation `W_G = delta_{ab} exp(i G (r_a - r_b))`
where `G` is a reciprocal lattice vector, `a` and `b` are sites and `r_a` is the
real space position of site `a`. If the lattice is primitive (there is only one
site per unit cell), the hopping vectors are also translation vectors and the
resulting Hamiltonian is BZ periodic.
    If `symmetrize=True`, all onsite unitary symmetries need to be explicitly
    specified as ContinuousGroupGenerators. Onsite PointGroupElements (ones
    with R=identity) are ignored.
If floating point numbers are used in the argument hopping_vectors, it is
recommended to have this function return BlochModel objects instead of Model
objects, by setting the bloch_model flag to True.
"""
N = 0
if not any([isinstance(norbs, OrderedDict), isinstance(norbs, list),
isinstance(norbs, tuple)]):
raise ValueError('norbs must be OrderedDict, tuple, or list.')
else:
norbs = OrderedDict(norbs)
ranges = dict()
for a, n in norbs.items():
ranges[a] = slice(N, N + n)
N += n
    # Separate point group and conserved quantities
pg = [g for g in symmetries if isinstance(g, PointGroupElement)]
conserved = [g for g in symmetries if isinstance(g, ContinuousGroupGenerator)]
if not all([(g.R is None or np.allclose(g.R, np.zeros_like(g.R))) for g in conserved]):
raise ValueError('Bloch Hamiltonian cannot have continuous rotation symmetry.')
if (not bloch_model) and any(isinstance(g.R, ta.ndarray_float) for g in pg):
raise ValueError('Cannot generate Bloch Hamiltonian in Model format using '
'floating point rotation matrices. To avoid this error, use '
'only PointGroupElements that are defined with exact sympy '
'rotation matrices or use `bloch_model=True`.')
# Check dimensionality
dim = len(hopping_vectors[0][-1])
assert all([len(hop[-1]) == dim for hop in hopping_vectors])
# Add zero hoppings for onsites
if onsites:
hopping_vectors = deepcopy(hopping_vectors)
hopping_vectors += [(a, a, [0] * dim) for a in norbs]
family = []
for a, b, vec in hopping_vectors:
n, m = norbs[a], norbs[b]
block_basis = np.eye(n*m, n*m).reshape((n*m, n, m))
block_basis = np.concatenate((block_basis, 1j*block_basis))
if bloch_model:
bloch_coeff = BlochCoeff(np.array(vec), sympy.sympify(1))
else:
# Hopping direction in real space
# Dot product with momentum vector
phase = sum([coordinate * momentum for coordinate, momentum in
zip(vec, momenta[:dim])])
factor = e**(I*phase)
hopfamily = []
for mat in block_basis:
matrix = np.zeros((N, N), dtype=complex)
matrix[ranges[a], ranges[b]] = mat
if bloch_model:
term = BlochModel({bloch_coeff: matrix}, momenta=momenta[:dim])
else:
term = Model({factor: matrix}, momenta=momenta[:dim])
term = term + term.T().conj()
hopfamily.append(term)
# If there are conserved quantities, constrain the hopping, it is assumed that
# conserved quantities do not mix different sites
if conserved:
hopfamily = constrain_family(conserved, hopfamily)
family.extend(hopfamily)
if symmetrize:
# Make sure that group is generated while keeping track of unitary part.
for g in pg:
g._strict_eq = True
pg = generate_group(set(pg))
# Symmetrize every term and remove linearly dependent or zero ones
family2 = []
for term in family:
term = symmetrize_monomial(term, pg).around(decimals=num_digits)
if not term == {}:
family2.append(term)
family = remove_duplicates(family2, tol=10**(-num_digits))
else:
# Constrain the terms by symmetry
family = constrain_family(pg, remove_duplicates(family))
if prettify:
family = make_basis_pretty(family, num_digits=num_digits)
return family
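# Illustrative usage sketch (example inputs, not fixtures of the library):
# a two-site unit cell with one orbital per site, one symbolic
# nearest-neighbour hopping vector, and spinless time reversal.
#
# import numpy as np
# norbs = [('A', 1), ('B', 1)]
# hopping_vectors = [('A', 'B', [0, 1])]
# time_reversal = PointGroupElement(np.eye(2), conjugate=True,
#                                   antisymmetry=False, U=np.eye(2))
# family = bloch_family(hopping_vectors, [time_reversal], norbs)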
|